ngram
listlengths
0
67.8k
[ "fact here only does -GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss class", "edict() Errs = edict() for tgt in tgts: cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt]", "nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names. In", "# Pred[tgt][mask] (BxK, 4) Loss[tgt] = self.cross_entropy_loss(pr, gt).double() return Loss class Neg_Dot_Loss_Handler: def", "import torch import torch.nn as nn from torch.autograd import Variable import numpy as", "\"\"\" tgts: list of target names e.g. tgts=['a', 'e', 't'] GT : dict", "= nn.CrossEntropyLoss().cuda() # interface function def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list", "Pred, GT): \"\"\" tgts: list of target names. In this case has to", ") # In fact here only does -GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1))", "names GT : dict of ground truth for each target BxHxW Pred: dict", "as np from easydict import EasyDict as edict from collections import OrderedDict as", "odict from itertools import product def eval_cls(Preds, GTs): acc = torch.mean((Preds==GTs).float()) return acc.item()", "\"\"\" Bug fixed on 22 Aug 2018 torch.dot can only be applied to", "= Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK, 4) Loss[tgt] = self.cross_entropy_loss(pr, gt).double() return Loss class", "return Loss, Errs class Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def compute_loss(self, tgts,", "tgts: list of target names e.g. 
tgts=['a', 'e', 't'] GT : dict of", "2018 torch.dot can only be applied to 1-dim tensor Don't know why there's", "# .clip(-1,1) return Loss, Errs class Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def", "dim=1)) return Loss class Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts,", "as nn from torch.autograd import Variable import numpy as np from easydict import", "compute_loss(self, tgts, Pred, GT): Loss = edict() for tgt in tgts: \"\"\" Bug", "__init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() # interface function def compute_loss(self, tgts, Pred, GT): \"\"\"", "of ground truth for each target BxHxWx3 Pred: dict of prediction for each", "\"\"\" mask = GT['mask'] Loss = edict() Errs = edict() for tgt in", "self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target", "names e.g. tgts=['a', 'e', 't'] GT : dict of ground truth for each", "tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt], GT[tgt]) # [warning] pred first, gt second return Loss", "nn.CrossEntropyLoss().cuda() # interface function def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of", "Loss class Neg_Dot_Loss_Handler: def __init_(self): pass def compute_loss(self, tgts, Pred, GT): Loss =", "__init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of", "GTs): acc = torch.mean((Preds==GTs).float()) return acc.item() class Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda()", "acc = torch.mean((Preds==GTs).float()) return acc.item() class Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() #", "pass def compute_loss(self, tgts, Pred, GT): Loss = edict() for tgt in tgts:", "import torch.nn as nn from torch.autograd import Variable import numpy 
as np from", "make loss as positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return Loss, Errs class", "GT): Loss = edict() for tgt in tgts: \"\"\" Bug fixed on 22", "torch import torch.nn as nn from torch.autograd import Variable import numpy as np", "torch.mean( 1 - cos_sim ) # use 1-cos(theta) to make loss as positive.", "1-cos(theta) to make loss as positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return Loss,", ".clip(-1,1) return Loss, Errs class Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def compute_loss(self,", "def __init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() # interface function def compute_loss(self, tgts, Pred, GT):", "to 1-dim tensor Don't know why there's no error. \"\"\" # Loss[tgt] =", "= self.cross_entropy_loss(pr, gt).double() return Loss class Neg_Dot_Loss_Handler: def __init_(self): pass def compute_loss(self, tgts,", "no error. \"\"\" # Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) # In fact here", "target BxHxWx4 \"\"\" mask = GT['mask'] Loss = edict() for tgt in tgts:", "def __init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list", "Pred: dict of prediction for each target BxHxWx3 \"\"\" mask = GT['mask'] Loss", "= edict() Errs = edict() for tgt in tgts: cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask])", "Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts:", "= torch.mean( 1 - cos_sim ) # use 1-cos(theta) to make loss as", "know why there's no error. 
\"\"\" # Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) #", "'e', 't'] GT : dict of ground truth for each target Pred: dict", "def __init_(self): pass def compute_loss(self, tgts, Pred, GT): Loss = edict() for tgt", "class Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts, Pred, GT): \"\"\"", "# interface function def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target", "-torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss class Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def compute_loss(self,", "coding: utf8 \"\"\" @Author : <NAME> \"\"\" import torch import torch.nn as nn", "from easydict import EasyDict as edict from collections import OrderedDict as odict from", "pr = Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK, 4) Loss[tgt] = self.cross_entropy_loss(pr, gt).double() return Loss", "each target BxHxWx3 \"\"\" mask = GT['mask'] Loss = edict() Errs = edict()", "# use 1-cos(theta) to make loss as positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1)", "torch.dot can only be applied to 1-dim tensor Don't know why there's no", "Pred: dict of prediction for each target \"\"\" Loss = edict() for tgt", "product def eval_cls(Preds, GTs): acc = torch.mean((Preds==GTs).float()) return acc.item() class Cross_Entropy_Loss_Handler: def __init__(self):", "class Neg_Dot_Loss_Handler: def __init_(self): pass def compute_loss(self, tgts, Pred, GT): Loss = edict()", "\"\"\" # Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) # In fact here only does", "list of target names e.g. 
tgts=['a', 'e', 't'] GT : dict of ground", "for each target \"\"\" Loss = edict() for tgt in tgts: Loss[tgt] =", "BxHxWx3 \"\"\" mask = GT['mask'] Loss = edict() Errs = edict() for tgt", "ground truth for each target BxHxWx3 Pred: dict of prediction for each target", "1 - cos_sim ) # use 1-cos(theta) to make loss as positive. Errs[tgt]", "from torch.autograd import Variable import numpy as np from easydict import EasyDict as", "__init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of", "Pred, GT): Loss = edict() for tgt in tgts: \"\"\" Bug fixed on", "Aug 2018 torch.dot can only be applied to 1-dim tensor Don't know why", "<NAME> \"\"\" import torch import torch.nn as nn from torch.autograd import Variable import", "here only does -GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss class Cos_Proximity_Loss_Handler:", "return Loss class Neg_Dot_Loss_Handler: def __init_(self): pass def compute_loss(self, tgts, Pred, GT): Loss", "nn from torch.autograd import Variable import numpy as np from easydict import EasyDict", "Loss = edict() for tgt in tgts: gt = GT[tgt][mask].view(-1) # as (BxK,)", "collections import OrderedDict as odict from itertools import product def eval_cls(Preds, GTs): acc", "mask = GT['mask'] Loss = edict() for tgt in tgts: gt = GT[tgt][mask].view(-1)", "Don't know why there's no error. 
\"\"\" # Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt]) )", ": dict of ground truth for each target BxHxWx3 Pred: dict of prediction", "torch.nn as nn from torch.autograd import Variable import numpy as np from easydict", "only does -GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss class Cos_Proximity_Loss_Handler: def", "ground truth for each target BxHxW Pred: dict of prediction for each target", "cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] = torch.mean( 1 - cos_sim ) # use", "torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return Loss, Errs class Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda()", "e.g. tgts=['a', 'e', 't'] GT : dict of ground truth for each target", "target Pred: dict of prediction for each target \"\"\" Loss = edict() for", "Loss, Errs class Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def compute_loss(self, tgts, Pred,", "use 1-cos(theta) to make loss as positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return", "'t'] GT : dict of ground truth for each target Pred: dict of", "edict() for tgt in tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt], GT[tgt]) # [warning] pred first,", "each target BxHxW Pred: dict of prediction for each target BxHxWx4 \"\"\" mask", "from itertools import product def eval_cls(Preds, GTs): acc = torch.mean((Preds==GTs).float()) return acc.item() class", "compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names e.g. 
tgts=['a', 'e',", "for tgt in tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt], GT[tgt]) # [warning] pred first, gt", "gt).double() return Loss class Neg_Dot_Loss_Handler: def __init_(self): pass def compute_loss(self, tgts, Pred, GT):", "for each target BxHxW Pred: dict of prediction for each target BxHxWx4 \"\"\"", "dict of ground truth for each target BxHxW Pred: dict of prediction for", "list of target names. In this case has to be tgts=['norm'] GT :", "case has to be tgts=['norm'] GT : dict of ground truth for each", "be tgts=['norm'] GT : dict of ground truth for each target BxHxWx3 Pred:", "loss as positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return Loss, Errs class Smooth_L1_Loss_Handler:", "as positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return Loss, Errs class Smooth_L1_Loss_Handler: def", "\"\"\" tgts: list of target names GT : dict of ground truth for", "GT : dict of ground truth for each target BxHxW Pred: dict of", "only be applied to 1-dim tensor Don't know why there's no error. \"\"\"", "for each target Pred: dict of prediction for each target \"\"\" Loss =", "In this case has to be tgts=['norm'] GT : dict of ground truth", "truth for each target BxHxWx3 Pred: dict of prediction for each target BxHxWx3", "edict() for tgt in tgts: gt = GT[tgt][mask].view(-1) # as (BxK,) pr =", "= torch.mean((Preds==GTs).float()) return acc.item() class Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() # interface", "Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() # interface function def compute_loss(self, tgts, Pred,", "for tgt in tgts: cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] = torch.mean( 1 -", "each target BxHxWx3 Pred: dict of prediction for each target BxHxWx3 \"\"\" mask", "target names e.g. 
tgts=['a', 'e', 't'] GT : dict of ground truth for", "torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss class Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def", "why there's no error. \"\"\" # Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) # In", "each target Pred: dict of prediction for each target \"\"\" Loss = edict()", "eval_cls(Preds, GTs): acc = torch.mean((Preds==GTs).float()) return acc.item() class Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss =", "import product def eval_cls(Preds, GTs): acc = torch.mean((Preds==GTs).float()) return acc.item() class Cross_Entropy_Loss_Handler: def", "Errs class Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def compute_loss(self, tgts, Pred, GT):", "GT[tgt][mask]) Loss[tgt] = torch.mean( 1 - cos_sim ) # use 1-cos(theta) to make", "\"\"\" mask = GT['mask'] Loss = edict() for tgt in tgts: gt =", "for tgt in tgts: gt = GT[tgt][mask].view(-1) # as (BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1)", "as (BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK, 4) Loss[tgt] = self.cross_entropy_loss(pr, gt).double()", "tgts: gt = GT[tgt][mask].view(-1) # as (BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK,", "tgts, Pred, GT): Loss = edict() for tgt in tgts: \"\"\" Bug fixed", "prediction for each target BxHxWx3 \"\"\" mask = GT['mask'] Loss = edict() Errs", ") # use 1-cos(theta) to make loss as positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi #", "of target names e.g. 
tgts=['a', 'e', 't'] GT : dict of ground truth", "acc.item() class Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() # interface function def compute_loss(self,", "tgt in tgts: \"\"\" Bug fixed on 22 Aug 2018 torch.dot can only", "numpy as np from easydict import EasyDict as edict from collections import OrderedDict", "def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names e.g. tgts=['a',", "of ground truth for each target Pred: dict of prediction for each target", "Loss = edict() for tgt in tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt], GT[tgt]) # [warning]", "(BxK, 4) Loss[tgt] = self.cross_entropy_loss(pr, gt).double() return Loss class Neg_Dot_Loss_Handler: def __init_(self): pass", "as odict from itertools import product def eval_cls(Preds, GTs): acc = torch.mean((Preds==GTs).float()) return", "import EasyDict as edict from collections import OrderedDict as odict from itertools import", "be applied to 1-dim tensor Don't know why there's no error. 
\"\"\" #", "does -GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss class Cos_Proximity_Loss_Handler: def __init__(self):", "def eval_cls(Preds, GTs): acc = torch.mean((Preds==GTs).float()) return acc.item() class Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss", "each target BxHxWx4 \"\"\" mask = GT['mask'] Loss = edict() for tgt in", "self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] = torch.mean( 1 - cos_sim ) # use 1-cos(theta) to", "target \"\"\" Loss = edict() for tgt in tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt], GT[tgt])", "OrderedDict as odict from itertools import product def eval_cls(Preds, GTs): acc = torch.mean((Preds==GTs).float())", "= GT['mask'] Loss = edict() for tgt in tgts: gt = GT[tgt][mask].view(-1) #", "# as (BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK, 4) Loss[tgt] = self.cross_entropy_loss(pr,", "import Variable import numpy as np from easydict import EasyDict as edict from", "return acc.item() class Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() # interface function def", "import numpy as np from easydict import EasyDict as edict from collections import", "list of target names GT : dict of ground truth for each target", "self.cross_entropy_loss(pr, gt).double() return Loss class Neg_Dot_Loss_Handler: def __init_(self): pass def compute_loss(self, tgts, Pred,", "there's no error. \"\"\" # Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) # In fact", "Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) # In fact here only does -GT[tgt]*Pred[tgt] Loss[tgt]", "tensor Don't know why there's no error. 
\"\"\" # Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt])", ": dict of ground truth for each target Pred: dict of prediction for", "dict of ground truth for each target Pred: dict of prediction for each", "<filename>S2.Surface_Normal/lib/helper.py # coding: utf8 \"\"\" @Author : <NAME> \"\"\" import torch import torch.nn", "= torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss class Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda()", "cos_sim ) # use 1-cos(theta) to make loss as positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi", "Loss = edict() Errs = edict() for tgt in tgts: cos_sim = self.cos_sim(Pred[tgt][mask],", "def compute_loss(self, tgts, Pred, GT): Loss = edict() for tgt in tgts: \"\"\"", "for tgt in tgts: \"\"\" Bug fixed on 22 Aug 2018 torch.dot can", "function def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names GT", "has to be tgts=['norm'] GT : dict of ground truth for each target", "prediction for each target \"\"\" Loss = edict() for tgt in tgts: Loss[tgt]", "4) Loss[tgt] = self.cross_entropy_loss(pr, gt).double() return Loss class Neg_Dot_Loss_Handler: def __init_(self): pass def", "Neg_Dot_Loss_Handler: def __init_(self): pass def compute_loss(self, tgts, Pred, GT): Loss = edict() for", "GT): \"\"\" tgts: list of target names e.g. 
tgts=['a', 'e', 't'] GT :", "GT['mask'] Loss = edict() for tgt in tgts: gt = GT[tgt][mask].view(-1) # as", "in tgts: gt = GT[tgt][mask].view(-1) # as (BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask]", "Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return Loss, Errs class Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss", "dict of prediction for each target BxHxWx3 \"\"\" mask = GT['mask'] Loss =", "torch.mean((Preds==GTs).float()) return acc.item() class Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() # interface function", "1-dim tensor Don't know why there's no error. \"\"\" # Loss[tgt] = torch.mean(", "Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss class Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim =", "of ground truth for each target BxHxW Pred: dict of prediction for each", "# Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) # In fact here only does -GT[tgt]*Pred[tgt]", "torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) # In fact here only does -GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean(", "Loss = edict() for tgt in tgts: \"\"\" Bug fixed on 22 Aug", "class Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def compute_loss(self, tgts, Pred, GT): \"\"\"", "dict of prediction for each target \"\"\" Loss = edict() for tgt in", "target names GT : dict of ground truth for each target BxHxW Pred:", "Loss[tgt] = self.cross_entropy_loss(pr, gt).double() return Loss class Neg_Dot_Loss_Handler: def __init_(self): pass def compute_loss(self,", "easydict import EasyDict as edict from collections import OrderedDict as odict from itertools", "def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names. 
In this", "fixed on 22 Aug 2018 torch.dot can only be applied to 1-dim tensor", "target BxHxW Pred: dict of prediction for each target BxHxWx4 \"\"\" mask =", "tgts, Pred, GT): \"\"\" tgts: list of target names. In this case has", "tgt in tgts: cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] = torch.mean( 1 - cos_sim", "prediction for each target BxHxWx4 \"\"\" mask = GT['mask'] Loss = edict() for", "= edict() for tgt in tgts: cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] = torch.mean(", "tgts: list of target names GT : dict of ground truth for each", "each target \"\"\" Loss = edict() for tgt in tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt],", "Loss[tgt] = torch.mean( 1 - cos_sim ) # use 1-cos(theta) to make loss", "= GT[tgt][mask].view(-1) # as (BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK, 4) Loss[tgt]", "target BxHxWx3 Pred: dict of prediction for each target BxHxWx3 \"\"\" mask =", "\"\"\" import torch import torch.nn as nn from torch.autograd import Variable import numpy", "tgts: \"\"\" Bug fixed on 22 Aug 2018 torch.dot can only be applied", "def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names GT :", "in tgts: \"\"\" Bug fixed on 22 Aug 2018 torch.dot can only be", "= GT['mask'] Loss = edict() Errs = edict() for tgt in tgts: cos_sim", "edict from collections import OrderedDict as odict from itertools import product def eval_cls(Preds,", "error. 
\"\"\" # Loss[tgt] = torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) # In fact here only", "-torch.dot(GT[tgt],Pred[tgt]) ) # In fact here only does -GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt],", "tgts, Pred, GT): \"\"\" tgts: list of target names GT : dict of", "@Author : <NAME> \"\"\" import torch import torch.nn as nn from torch.autograd import", "# In fact here only does -GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return", "\"\"\" Loss = edict() for tgt in tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt], GT[tgt]) #", "for each target BxHxWx3 Pred: dict of prediction for each target BxHxWx3 \"\"\"", "Pred, GT): \"\"\" tgts: list of target names GT : dict of ground", "tgts, Pred, GT): \"\"\" tgts: list of target names e.g. tgts=['a', 'e', 't']", "= self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] = torch.mean( 1 - cos_sim ) # use 1-cos(theta)", "in tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt], GT[tgt]) # [warning] pred first, gt second return", "in tgts: cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] = torch.mean( 1 - cos_sim )", "Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts:", "this case has to be tgts=['norm'] GT : dict of ground truth for", "= torch.mean( -torch.dot(GT[tgt],Pred[tgt]) ) # In fact here only does -GT[tgt]*Pred[tgt] Loss[tgt] =", "tgt in tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt], GT[tgt]) # [warning] pred first, gt second", "of target names. 
In this case has to be tgts=['norm'] GT : dict", "of prediction for each target BxHxWx3 \"\"\" mask = GT['mask'] Loss = edict()", "= nn.SmoothL1Loss().cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names", "gt = GT[tgt][mask].view(-1) # as (BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK, 4)", "GT : dict of ground truth for each target Pred: dict of prediction", "Loss class Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts, Pred, GT):", "compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names. In this case", "target names. In this case has to be tgts=['norm'] GT : dict of", "itertools import product def eval_cls(Preds, GTs): acc = torch.mean((Preds==GTs).float()) return acc.item() class Cross_Entropy_Loss_Handler:", "import OrderedDict as odict from itertools import product def eval_cls(Preds, GTs): acc =", "Pred[tgt][mask] (BxK, 4) Loss[tgt] = self.cross_entropy_loss(pr, gt).double() return Loss class Neg_Dot_Loss_Handler: def __init_(self):", "for each target BxHxWx3 \"\"\" mask = GT['mask'] Loss = edict() Errs =", "return Loss class Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts, Pred,", "edict() for tgt in tgts: cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] = torch.mean( 1", "ground truth for each target Pred: dict of prediction for each target \"\"\"", "Errs = edict() for tgt in tgts: cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] =", "for each target BxHxWx4 \"\"\" mask = GT['mask'] Loss = edict() for tgt", "__init_(self): pass def compute_loss(self, tgts, Pred, GT): Loss = edict() for tgt in", "EasyDict as edict from collections import OrderedDict as odict from itertools import product", "GT[tgt][mask].view(-1) # as (BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK, 4) Loss[tgt] =", "Bug 
fixed on 22 Aug 2018 torch.dot can only be applied to 1-dim", "- cos_sim ) # use 1-cos(theta) to make loss as positive. Errs[tgt] =", "tgt in tgts: gt = GT[tgt][mask].view(-1) # as (BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1) #", "as edict from collections import OrderedDict as odict from itertools import product def", "self.cos_sim = nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target", "= edict() for tgt in tgts: Loss[tgt] = self.smooth_l1_loss(Pred[tgt], GT[tgt]) # [warning] pred", "Pred, GT): \"\"\" tgts: list of target names e.g. tgts=['a', 'e', 't'] GT", "dict of ground truth for each target BxHxWx3 Pred: dict of prediction for", "can only be applied to 1-dim tensor Don't know why there's no error.", "22 Aug 2018 torch.dot can only be applied to 1-dim tensor Don't know", "Pred: dict of prediction for each target BxHxWx4 \"\"\" mask = GT['mask'] Loss", "GT['mask'] Loss = edict() Errs = edict() for tgt in tgts: cos_sim =", "nn.SmoothL1Loss().cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names e.g.", ": <NAME> \"\"\" import torch import torch.nn as nn from torch.autograd import Variable", "of prediction for each target BxHxWx4 \"\"\" mask = GT['mask'] Loss = edict()", "BxHxWx3 Pred: dict of prediction for each target BxHxWx3 \"\"\" mask = GT['mask']", "positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return Loss, Errs class Smooth_L1_Loss_Handler: def __init__(self):", "on 22 Aug 2018 torch.dot can only be applied to 1-dim tensor Don't", "\"\"\" @Author : <NAME> \"\"\" import torch import torch.nn as nn from torch.autograd", "Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK, 4) Loss[tgt] = self.cross_entropy_loss(pr, gt).double() return Loss class Neg_Dot_Loss_Handler:", "compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names GT : dict", "\"\"\" tgts: list of target names. 
In this case has to be tgts=['norm']", "to be tgts=['norm'] GT : dict of ground truth for each target BxHxWx3", "GT): \"\"\" tgts: list of target names GT : dict of ground truth", "utf8 \"\"\" @Author : <NAME> \"\"\" import torch import torch.nn as nn from", "BxHxWx4 \"\"\" mask = GT['mask'] Loss = edict() for tgt in tgts: gt", "tgts: cos_sim = self.cos_sim(Pred[tgt][mask], GT[tgt][mask]) Loss[tgt] = torch.mean( 1 - cos_sim ) #", "In fact here only does -GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss", "-GT[tgt]*Pred[tgt] Loss[tgt] = torch.mean( -torch.sum(GT[tgt]*Pred[tgt], dim=1)) return Loss class Cos_Proximity_Loss_Handler: def __init__(self): self.cos_sim", "def __init__(self): self.smooth_l1_loss = nn.SmoothL1Loss().cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list", "Variable import numpy as np from easydict import EasyDict as edict from collections", "edict() for tgt in tgts: \"\"\" Bug fixed on 22 Aug 2018 torch.dot", "dict of prediction for each target BxHxWx4 \"\"\" mask = GT['mask'] Loss =", "tgts: list of target names. In this case has to be tgts=['norm'] GT", "of prediction for each target \"\"\" Loss = edict() for tgt in tgts:", "# coding: utf8 \"\"\" @Author : <NAME> \"\"\" import torch import torch.nn as", "BxHxW Pred: dict of prediction for each target BxHxWx4 \"\"\" mask = GT['mask']", "mask = GT['mask'] Loss = edict() Errs = edict() for tgt in tgts:", "GT): \"\"\" tgts: list of target names. In this case has to be", "applied to 1-dim tensor Don't know why there's no error. 
\"\"\" # Loss[tgt]", "tgts=['norm'] GT : dict of ground truth for each target BxHxWx3 Pred: dict", "interface function def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names", "from collections import OrderedDict as odict from itertools import product def eval_cls(Preds, GTs):", "of target names GT : dict of ground truth for each target BxHxW", "= torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return Loss, Errs class Smooth_L1_Loss_Handler: def __init__(self): self.smooth_l1_loss =", "to make loss as positive. Errs[tgt] = torch.acos(cos_sim.clamp(-1,1))*180./np.pi # .clip(-1,1) return Loss, Errs", "torch.autograd import Variable import numpy as np from easydict import EasyDict as edict", "tgts=['a', 'e', 't'] GT : dict of ground truth for each target Pred:", "truth for each target Pred: dict of prediction for each target \"\"\" Loss", "np from easydict import EasyDict as edict from collections import OrderedDict as odict", "target BxHxWx3 \"\"\" mask = GT['mask'] Loss = edict() Errs = edict() for", "= edict() for tgt in tgts: \"\"\" Bug fixed on 22 Aug 2018", "= nn.CosineSimilarity(dim=1).cuda() def compute_loss(self, tgts, Pred, GT): \"\"\" tgts: list of target names.", "names. 
In this case has to be tgts=['norm'] GT : dict of ground", "class Cross_Entropy_Loss_Handler: def __init__(self): self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() # interface function def compute_loss(self, tgts,", ": dict of ground truth for each target BxHxW Pred: dict of prediction", "(BxK,) pr = Pred[tgt][mask].view(gt.size(0),-1) # Pred[tgt][mask] (BxK, 4) Loss[tgt] = self.cross_entropy_loss(pr, gt).double() return", "GT : dict of ground truth for each target BxHxWx3 Pred: dict of", "self.cross_entropy_loss = nn.CrossEntropyLoss().cuda() # interface function def compute_loss(self, tgts, Pred, GT): \"\"\" tgts:", "truth for each target BxHxW Pred: dict of prediction for each target BxHxWx4", "= edict() for tgt in tgts: gt = GT[tgt][mask].view(-1) # as (BxK,) pr" ]
[ "2.)**2)) @registry.register def sphere4(x: np.ndarray) -> float: \"\"\"Even more translated sphere function.\"\"\" return", "i in range(problemDimensions): firstSum += (x[i]-mu1)**2 secondSum += (x[i]-mu2)**2 thirdSum += 1.0 -", "1, and returns len(x) - number of ones.. It also works in the", "with 4 thresholds (quantiles of Gaussian).\"\"\" return _onemax(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardjump5(y: np.ndarray)", "leadingones([1 0 0 0]) = 1. \"\"\" for i, x_ in enumerate(list(x)): if", "classical example is ellipsoid. \"\"\" return float(x[0]**2 + 1000000. * np.sum(x[1:]**2)) @registry.register def", "0.0 thirdSum = 0.0 for i in range(problemDimensions): firstSum += (x[i]-mu1)**2 secondSum +=", "_leadingones(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardonemax5(y: np.ndarray) -> float: \"\"\"Hardonemax, with a discretization by 5", "0. angle = np.arctan(x[0] / x[1]) if x[1] != 0. else np.pi /", "out algorithms not invariant to the order of variables.\"\"\" return float(x[-1]**2 + 1000000.", "len(x) m = n // 4 o = n - _onemax(x) if o", "1) / float(len(x) - 1))) * (x[i]**2) for i in range(len(x))) @registry.register def", "1 1 1]) = 0, leadingones([1 0 0 0]) = 1. \"\"\" for", "\"\"\" for i, x_ in enumerate(list(x)): if int(round(x_)) != 1: return len(x) -", "discretization of leadingones (This multiplies the dimension by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def", "example of ill conditioned function. The other classical example is cigar. \"\"\" return", "returns len(x) - number of ones.. It also works in the continuous case", "sumdeceptive(x: np.ndarray) -> float: dec = 3 * x**2 - (2 / (3**(x", ">= 2 return float(max(np.abs(np.arctan(x[1]/x[0])), np.sqrt(x[0]**2. + x[1]**2.), 1. 
if x[0] > 0 else", "with translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray) -> float: \"\"\"Onemax, with a discretization in", "integration, tested in optim because why not.\"\"\" return -float(np.exp(-sum(x**2 / 4.))) @registry.register def", "return a positive value for maximization return float(39.16599 * len(x) + 1 *", "np.ndarray) -> float: \"\"\"New multimodal function (proposed for Nevergrad).\"\"\" return float(np.sum((x**2) * (1.1", "def hardonemax(y: np.ndarray) -> float: \"\"\"Onemax, with a discretization in 2 by threshold", "1000000. * np.sum(x[:-1]**2)) @registry.register def cigar(x: np.ndarray) -> float: \"\"\"Classical example of ill", "used in integration, tested in optim because why not.\"\"\" return -float(genzcornerpeak(y)) @registry.register def", "'''For asynchronous experiments, we induce delays.''' time.sleep(abs(1./x[0]) / 100000. if x[0] != 0.", "with a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y,", "np.ndarray, noise: float) -> float: \"\"\"Classical function for testing noisy optimization.\"\"\" x =", "@registry.register def sumdeceptive(x: np.ndarray) -> float: dec = 3 * x**2 - (2", "= - np.sqrt(abs((mu1**2 - 1.0) / s)) firstSum = 0.0 secondSum = 0.0", "optim because why not.\"\"\" return -float(genzcornerpeak(y)) @registry.register def genzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One", "* np.sqrt(problemDimensions + 20.0) - 8.2)) mu1 = 2.5 mu2 = - np.sqrt(abs((mu1**2", "This multiplies the dimension by 5.\"\"\" return _leadingones(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def genzcornerpeak(y: np.ndarray)", "s = 1.0 - (1.0 / (2.0 * np.sqrt(problemDimensions + 20.0) - 8.2))", "onemax (This multiplies the dimension by 2).\"\"\" return _onemax(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def jump(y: 
np.ndarray)", "x = np.asarray(x) val = np.sum(np.power(x, 4) - 16 * np.power(x, 2) +", "why not.\"\"\" value = float(1 + np.mean(np.tanh(y))) if value == 0: return float(\"inf\")", "def __call__(self, x: np.ndarray) -> float: return float(np.sum(x**2)) def get_postponing_delay(self, args: Tuple[Any, ...],", "because why not.\"\"\" return float(np.exp(-np.sum(x**2 / 4.))) @registry.register def minusgenzgaussianpeakintegral(x: np.ndarray) -> float:", "return float(x[-1]**2 + 1000000. * np.sum(x[:-1]**2)) @registry.register def cigar(x: np.ndarray) -> float: \"\"\"Classical", "\"\"\"Similar to Ellipsoid, but variables in inverse order. E.g. for pointing out algorithms", "Based on https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html.\"\"\" problemDimensions = len(x) s = 1.0 - (1.0 / (2.0", "algorithms not invariant to the order of variables.\"\"\" return float(x[-1]**2 + 1000000. *", "float(np.sum((x - 1.)**2)) @registry.register def sphere2(x: np.ndarray) -> float: \"\"\"A bit more translated", "deceptivepath(x: np.ndarray) -> float: \"\"\"A function which needs following a long path. Most", "0 def _jump(x: List[int]) -> float: # TODO: docstring? \"\"\"There exists variants of", "multiplies the dimension by 2).\"\"\" return _onemax(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def jump(y: np.ndarray) -> float:", "number of 1, and returns len(x) - number of ones.. It also works", "float(np.sum(x**2)) class DelayedSphere(PostponedObject): def __call__(self, x: np.ndarray) -> float: return float(np.sum(x**2)) def get_postponing_delay(self,", "x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0) @registry.register def griewank(x: np.ndarray) -> float: \"\"\"Multimodal function,", "of jump functions; we are in minimization. The principle of a jump function", "mu2 = - np.sqrt(abs((mu1**2 - 1.0) / s)) firstSum = 0.0 secondSum =", "if x[0] != 0. else 0.) 
return float(np.sum(x**2)) class DelayedSphere(PostponedObject): def __call__(self, x:", "Deceptive part. def _styblinksitang(x: np.ndarray, noise: float) -> float: \"\"\"Classical function for testing", "is the second most classical discrete function, adapted for minimization. Returns len(x) -", "in optim because why not.\"\"\" return float(np.exp(-np.sum(x**2 / 4.))) @registry.register def minusgenzgaussianpeakintegral(x: np.ndarray)", "np.ndarray) -> float: \"\"\"Softmax discretization of jump (This multiplies the dimension by 2).\"\"\"", "+ (1 - x[:-1])**2.0) @registry.register def griewank(x: np.ndarray) -> float: \"\"\"Multimodal function, often", "a jump function is that local descent does not succeed. Jumps are necessary.", "optim because why not.\"\"\" return float(np.exp(-np.sum(x**2 / 4.))) @registry.register def minusgenzgaussianpeakintegral(x: np.ndarray) ->", "in optim because why not.\"\"\" value = float(1 + np.mean(np.tanh(y))) if value ==", "Any], value: float) -> float: x = args[0] return float(abs(1./x[0]) / 1000.) if", "bug.\"\"\" return float(np.sum(x**2)) @registry.register def sphere1(x: np.ndarray) -> float: \"\"\"Translated sphere function.\"\"\" return", "@registry.register def sphere4(x: np.ndarray) -> float: \"\"\"Even more translated sphere function.\"\"\" return float(np.sum((x", "with 5 possibles values. 
This multiplies the dimension by 5.\"\"\" return _onemax(discretization.softmax_discretization(y, 5))", "Genz functions, originally used in integration, tested in optim because why not.\"\"\" return", "5.\"\"\" return _leadingones(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def genzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the", "def _onemax(x: List[int]) -> float: \"\"\"onemax(x) is the most classical case of discrete", "\"\"\"A bit more translated sphere function.\"\"\" return float(np.sum((x - 2.)**2)) @registry.register def sphere4(x:", "1 else 0 for w in x) def _leadingones(x: List[int]) -> float: \"\"\"leadingones", "* np.power(x, 2) + 5 * x) # return a positive value for", "np.sqrt(1 + np.arange(len(x))))) return 1 + (float(part1)/4000.0) - float(part2) @registry.register def deceptiveillcond(x: np.ndarray)", "return len(x) - sum(1 if int(round(w)) == 1 else 0 for w in", "+ np.mean(np.tanh(y))) if value == 0: return float(\"inf\") return value**(-len(y) - 1) @registry.register_with_info(no_transfrom=True)", "# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. # #", "not be used with translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray) -> float: \"\"\"Onemax, with", "@registry.register_with_info(no_transfrom=True) def onemax5(y: np.ndarray) -> float: \"\"\"Softmax discretization of onemax with 5 possibles", "optimum.\"\"\" assert len(x) >= 2 distance = np.sqrt(x[0]**2 + x[1]**2) if distance ==", "0.1: return 1. 
return float(distance) @registry.register def deceptivemultimodal(x: np.ndarray) -> float: \"\"\"Infinitely many", "sphere1(x: np.ndarray) -> float: \"\"\"Translated sphere function.\"\"\" return float(np.sum((x - 1.)**2)) @registry.register def", "not.\"\"\" return -float(genzcornerpeak(y)) @registry.register def genzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of the Genz", "float(39.16599 * len(x) + 1 * 0.5 * val + noise * np.random.normal(size=val.shape))", "np.ndarray) -> float: \"\"\"A bit more translated sphere function.\"\"\" return float(np.sum((x - 2.)**2))", "1.)**2)) @registry.register def sphere2(x: np.ndarray) -> float: \"\"\"A bit more translated sphere function.\"\"\"", "np.abs(np.cos(invdistance) - angle) > 0.1: return 1. return float(distance) @registry.register def deceptivemultimodal(x: np.ndarray)", "multimodal function.\"\"\" cosi = float(np.sum(np.cos(2 * np.pi * x))) return float(10 * (len(x)", "- x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0) @registry.register def griewank(x: np.ndarray) -> float: \"\"\"Multimodal", "+ 1000000. * np.sum(x[:-1]**2)) @registry.register def cigar(x: np.ndarray) -> float: \"\"\"Classical example of", "0 (>0 or <0).\"\"\" return _leadingones(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardonemax5(y: np.ndarray) -> float: \"\"\"Hardonemax,", "= len(x) m = n // 4 o = n - _onemax(x) if", "Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. # # This", "\"\"\"Classical function for testing noisy optimization.\"\"\" x = np.asarray(x) val = np.sum(np.power(x, 4)", "np.ndarray) -> float: return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0) @registry.register def", "of this source tree. 
import time from typing import Dict, Any, Tuple, List,", "i in range(len(x))) @registry.register def rastrigin(x: np.ndarray) -> float: \"\"\"Classical multimodal function.\"\"\" cosi", "tested in optim because why not.\"\"\" return -float(genzcornerpeak(y)) @registry.register def genzgaussianpeakintegral(x: np.ndarray) ->", "(float(part1)/4000.0) - float(part2) @registry.register def deceptiveillcond(x: np.ndarray) -> float: \"\"\"An extreme ill conditioned", "float(len(x) - 1))) * (x[i]**2) for i in range(len(x))) @registry.register def rastrigin(x: np.ndarray)", "linear(x: np.ndarray) -> float: return float(np.tanh(x[0])) @registry.register def st0(x: np.ndarray) -> float: \"\"\"Styblinksitang", "classical continuous optimization testbed. If you do not solve that one then you", "other classical example is cigar. \"\"\" return sum((10**(6 * (i - 1) /", "def onemax(y: np.ndarray) -> float: \"\"\"Softmax discretization of onemax (This multiplies the dimension", "used with translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray) -> float: \"\"\"Onemax, with a discretization", "x_ in enumerate(list(x)): if int(round(x_)) != 1: return len(x) - i return 0", "noise 10.\"\"\" return _styblinksitang(x, 10) @registry.register def st100(x: np.ndarray) -> float: \"\"\"Styblinksitang function", "in range(problemDimensions): firstSum += (x[i]-mu1)**2 secondSum += (x[i]-mu2)**2 thirdSum += 1.0 - np.cos(2*np.pi*(x[i]-mu1))", "np.random.normal(size=val.shape)) @registry.register def delayedsphere(x: np.ndarray) -> float: '''For asynchronous experiments, we induce delays.'''", "problemDimensions = len(x) s = 1.0 - (1.0 / (2.0 * np.sqrt(problemDimensions +", "def deceptivepath(x: np.ndarray) -> float: \"\"\"A function which needs following a long path.", "\"\"\"Styblinksitang function with 0 noise.\"\"\" return _styblinksitang(x, 0) @registry.register def st1(x: np.ndarray) ->", "deceptivemultimodal(x: np.ndarray) -> float: \"\"\"Infinitely 
many local optima, as we get closer to", "_jump(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardleadingones5(y: np.ndarray) -> float: \"\"\"Leadingones, with a discretization by", "x[0] > 0 else 0.) if x[0] != 0. else float(\"inf\")) @registry.register def", "originally used in integration, tested in optim because why not.\"\"\" return -float(genzcornerpeak(y)) @registry.register", "sphere(x)) @registry.register def hm(x: np.ndarray) -> float: \"\"\"New multimodal function (proposed for Nevergrad).\"\"\"", "function, often used in Bayesian optimization.\"\"\" part1 = np.sum(x**2) part2 = np.prod(np.cos(x /", "float(np.sum((x - 2.)**2)) @registry.register def sphere4(x: np.ndarray) -> float: \"\"\"Even more translated sphere", "Most algorithms fail on this. The path becomes thiner as we get closer", "* (len(x) - cosi) + sphere(x)) @registry.register def hm(x: np.ndarray) -> float: \"\"\"New", "-> float: \"\"\"Translated sphere function.\"\"\" return float(np.sum((x - 1.)**2)) @registry.register def sphere2(x: np.ndarray)", "\"\"\"Styblinksitang function with noise 1.\"\"\" return _styblinksitang(x, 1) @registry.register def st10(x: np.ndarray) ->", "with noise 10.\"\"\" return _styblinksitang(x, 10) @registry.register def st100(x: np.ndarray) -> float: \"\"\"Styblinksitang", "on this. The path becomes thiner as we get closer to the optimum.\"\"\"", "(c) Facebook, Inc. and its affiliates. All Rights Reserved. 
# # This source", "with a discretization in 2 by threshold 0 (>0 or <0).\"\"\" return _jump(discretization.threshold_discretization(y))", "get closer to the optimum.\"\"\" assert len(x) >= 2 distance = np.sqrt(x[0]**2 +", "in enumerate(list(x)): if int(round(x_)) != 1: return len(x) - i return 0 def", "s)) firstSum = 0.0 secondSum = 0.0 thirdSum = 0.0 for i in", "float: \"\"\"Styblinksitang function with noise 10.\"\"\" return _styblinksitang(x, 10) @registry.register def st100(x: np.ndarray)", "- 1.)**2)) @registry.register def sphere2(x: np.ndarray) -> float: \"\"\"A bit more translated sphere", "or <0).\"\"\" return _onemax(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardjump(y: np.ndarray) -> float: \"\"\"Hardjump, with a", "float: \"\"\"Leading ones, with a discretization in 2 by threshold 0 (>0 or", "function with noise 10.\"\"\" return _styblinksitang(x, 10) @registry.register def st100(x: np.ndarray) -> float:", "hardonemax5(y: np.ndarray) -> float: \"\"\"Hardonemax, with a discretization by 5 with 4 thresholds", "in integration, tested in optim because why not.\"\"\" return float(np.exp(-np.sum(x**2 / 4.))) @registry.register", "-float(genzcornerpeak(y)) @registry.register def genzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of the Genz functions, originally", "0 else 0.) if x[0] != 0. else float(\"inf\")) @registry.register def deceptivepath(x: np.ndarray)", "typing import Dict, Any, Tuple, List, Callable import numpy as np from .utils", "example of ill conditioned function. The other classical example is ellipsoid. 
\"\"\" return", "rastrigin(x: np.ndarray) -> float: \"\"\"Classical multimodal function.\"\"\" cosi = float(np.sum(np.cos(2 * np.pi *", "be used with translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray) -> float: \"\"\"Onemax, with a", "def griewank(x: np.ndarray) -> float: \"\"\"Multimodal function, often used in Bayesian optimization.\"\"\" part1", "- (2 / (3**(x - 2)**2 + .1)) return float(np.max(dec)) @registry.register def sumdeceptive(x:", "1. return float(distance) @registry.register def deceptivemultimodal(x: np.ndarray) -> float: \"\"\"Infinitely many local optima,", "import PostponedObject from ..instrumentation import discretization from ..common.decorators import Registry registry = Registry[Callable[[np.ndarray],", "adapted for minimization. Returns len(x) - number of initial 1. I.e. leadingones([0 1", "is the most classical case of discrete functions, adapted to minimization. It is", "np.power(x, 2) + 5 * x) # return a positive value for maximization", "secondSum = 0.0 thirdSum = 0.0 for i in range(problemDimensions): firstSum += (x[i]-mu1)**2", "cigar(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned function. The other classical", "def _styblinksitang(x: np.ndarray, noise: float) -> float: \"\"\"Classical function for testing noisy optimization.\"\"\"", "of the Genz functions, originally used in integration, tested in optim because why", "(2.0 * np.sqrt(problemDimensions + 20.0) - 8.2)) mu1 = 2.5 mu2 = -", "if value == 0: return float(\"inf\") return value**(-len(y) - 1) @registry.register_with_info(no_transfrom=True) def minusgenzcornerpeak(y:", "of initial 1. I.e. leadingones([0 1 1 1]) = 4, leadingones([1 1 1", "return _leadingones(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardonemax5(y: np.ndarray) -> float: \"\"\"Hardonemax, with a discretization by", "-> float: x = args[0] return float(abs(1./x[0]) / 1000.) 
if x[0] != 0.", "float: return float(np.tanh(x[0])) @registry.register def st0(x: np.ndarray) -> float: \"\"\"Styblinksitang function with 0", "if x[0] != 0. else 0. registry.register(DelayedSphere()) @registry.register def sphere(x: np.ndarray) -> float:", "- 4.)**2)) @registry.register def maxdeceptive(x: np.ndarray) -> float: dec = 3 * x**2", "\"\"\"Leading ones, with a discretization in 2 by threshold 0 (>0 or <0).\"\"\"", "/ (2.0 * np.sqrt(problemDimensions + 20.0) - 8.2)) mu1 = 2.5 mu2 =", "def sphere4(x: np.ndarray) -> float: \"\"\"Even more translated sphere function.\"\"\" return float(np.sum((x -", "functions, originally used in integration, tested in optim because why not.\"\"\" value =", "variants of jump functions; we are in minimization. The principle of a jump", "maximization return float(39.16599 * len(x) + 1 * 0.5 * val + noise", "float: \"\"\"Classical example of ill conditioned function. The other classical example is cigar.", "by 2).\"\"\" return _onemax(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def jump(y: np.ndarray) -> float: \"\"\"Softmax discretization of", "not.\"\"\" return -float(np.exp(-sum(x**2 / 4.))) @registry.register def slope(x: np.ndarray) -> float: return sum(x)", "testbed. If you do not solve that one then you have a bug.\"\"\"", "i return 0 def _jump(x: List[int]) -> float: # TODO: docstring? \"\"\"There exists", "\"\"\"Jump, with a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return", "jump with 5 possibles values. 
This multiplies the dimension by 5.\"\"\" return _jump(discretization.softmax_discretization(y,", "..common.decorators import Registry registry = Registry[Callable[[np.ndarray], float]]() def _onemax(x: List[int]) -> float: \"\"\"onemax(x)", "_onemax(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardjump(y: np.ndarray) -> float: \"\"\"Hardjump, with a discretization in 2", "testing noisy optimization.\"\"\" x = np.asarray(x) val = np.sum(np.power(x, 4) - 16 *", "license found in the # LICENSE file in the root directory of this", "thirdSum = 0.0 for i in range(problemDimensions): firstSum += (x[i]-mu1)**2 secondSum += (x[i]-mu2)**2", "= np.sum(x**2) part2 = np.prod(np.cos(x / np.sqrt(1 + np.arange(len(x))))) return 1 + (float(part1)/4000.0)", "* np.sum(x[:-1]**2)) @registry.register def cigar(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned", "def hm(x: np.ndarray) -> float: \"\"\"New multimodal function (proposed for Nevergrad).\"\"\" return float(np.sum((x**2)", "functions using discretization should not be used with translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray)", "float(np.sum(x**2)) def get_postponing_delay(self, args: Tuple[Any, ...], kwargs: Dict[str, Any], value: float) -> float:", "def altellipsoid(y: np.ndarray) -> float: \"\"\"Similar to Ellipsoid, but variables in inverse order.", "this source tree. import time from typing import Dict, Any, Tuple, List, Callable", "conditioned functions. Most algorithms fail on this. The condition number increases to infinity", "licensed under the MIT license found in the # LICENSE file in the", "in the root directory of this source tree. import time from typing import", "* x) # return a positive value for maximization return float(39.16599 * len(x)", "float: \"\"\"Multimodal function, often used in Bayesian optimization.\"\"\" part1 = np.sum(x**2) part2 =", "classical example is cigar. 
\"\"\" return sum((10**(6 * (i - 1) / float(len(x)", "@registry.register def rastrigin(x: np.ndarray) -> float: \"\"\"Classical multimodal function.\"\"\" cosi = float(np.sum(np.cos(2 *", "(This multiplies the dimension by 2).\"\"\" return _jump(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def leadingones(y: np.ndarray) ->", "TODO: docstring? \"\"\"There exists variants of jump functions; we are in minimization. The", "The condition number increases to infinity as we get closer to the optimum.\"\"\"", "on https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html.\"\"\" problemDimensions = len(x) s = 1.0 - (1.0 / (2.0 *", "def altcigar(x: np.ndarray) -> float: \"\"\"Similar to cigar, but variables in inverse order.", "np.ndarray) -> float: \"\"\"Softmax discretization of onemax (This multiplies the dimension by 2).\"\"\"", "return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def onemax5(y: np.ndarray) -> float: \"\"\"Softmax discretization of onemax with", "variables in inverse order. E.g. for pointing out algorithms not invariant to the", "return float(abs(1./x[0]) / 1000.) if x[0] != 0. else 0. registry.register(DelayedSphere()) @registry.register def", "discretization of jump (This multiplies the dimension by 2).\"\"\" return _jump(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def", "/ 100000. if x[0] != 0. else 0.) 
return float(np.sum(x**2)) class DelayedSphere(PostponedObject): def", "o = n - _onemax(x) if o == n or o <= n", "leadingones([0 1 1 1]) = 4, leadingones([1 1 1 1]) = 0, leadingones([1", "@registry.register def griewank(x: np.ndarray) -> float: \"\"\"Multimodal function, often used in Bayesian optimization.\"\"\"", "return _onemax(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardjump(y: np.ndarray) -> float: \"\"\"Hardjump, with a discretization in", "get closer to the optimum.\"\"\" assert len(x) >= 2 return float(max(np.abs(np.arctan(x[1]/x[0])), np.sqrt(x[0]**2. +", "return value**(-len(y) - 1) @registry.register_with_info(no_transfrom=True) def minusgenzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the", "def leadingones(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones (This multiplies the dimension", "leadingones5(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones with 5 possibles values. This", "<0).\"\"\" return _onemax(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardjump(y: np.ndarray) -> float: \"\"\"Hardjump, with a discretization", "float: \"\"\"Translated sphere function.\"\"\" return float(np.sum((x - 1.)**2)) @registry.register def sphere2(x: np.ndarray) ->", "-> float: \"\"\"Hardjump, with a discretization in 2 by threshold 0 (>0 or", "value for maximization return float(39.16599 * len(x) + 1 * 0.5 * val", "noise: float) -> float: \"\"\"Classical function for testing noisy optimization.\"\"\" x = np.asarray(x)", "long path. Most algorithms fail on this. The path becomes thiner as we", "order of variables.\"\"\" x = y[::-1] return sum((10**(6 * (i - 1) /", "else 0.) if x[0] != 0. 
else float(\"inf\")) @registry.register def deceptivepath(x: np.ndarray) ->", "4, leadingones([1 1 1 1]) = 0, leadingones([1 0 0 0]) = 1.", "code is licensed under the MIT license found in the # LICENSE file", "a discretization in 2 by threshold 0 (>0 or <0).\"\"\" return _jump(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True)", "a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y, 5))", "@registry.register_with_info(no_transfrom=True) def leadingones5(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones with 5 possibles", "value**(-len(y) - 1) @registry.register_with_info(no_transfrom=True) def minusgenzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the Genz", "- m - o return o # Deceptive part. def _styblinksitang(x: np.ndarray, noise:", "+= (x[i]-mu1)**2 secondSum += (x[i]-mu2)**2 thirdSum += 1.0 - np.cos(2*np.pi*(x[i]-mu1)) return min(firstSum, 1.0*problemDimensions", "return sum((10**(6 * (i - 1) / float(len(x) - 1))) * (x[i]**2) for", "5.\"\"\" return _onemax(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def jump5(y: np.ndarray) -> float: \"\"\"Softmax discretization of", "5)) @registry.register_with_info(no_transfrom=True) def jump5(y: np.ndarray) -> float: \"\"\"Softmax discretization of jump with 5", "5)) @registry.register_with_info(no_transfrom=True) def genzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the Genz functions, originally", "float(np.max(dec)) @registry.register def sumdeceptive(x: np.ndarray) -> float: dec = 3 * x**2 -", "return float(39.16599 * len(x) + 1 * 0.5 * val + noise *", "of jump (This multiplies the dimension by 2).\"\"\" return _jump(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def leadingones(y:", "classical case of discrete functions, adapted to minimization. 
It is originally designed for", "condition number increases to infinity as we get closer to the optimum.\"\"\" assert", "second most classical discrete function, adapted for minimization. Returns len(x) - number of", "float(np.sum(dec)) @registry.register def altcigar(x: np.ndarray) -> float: \"\"\"Similar to cigar, but variables in", "- (1.0 / (2.0 * np.sqrt(problemDimensions + 20.0) - 8.2)) mu1 = 2.5", "@registry.register def linear(x: np.ndarray) -> float: return float(np.tanh(x[0])) @registry.register def st0(x: np.ndarray) ->", "Tuple[Any, ...], kwargs: Dict[str, Any], value: float) -> float: x = args[0] return", "0.5 * val + noise * np.random.normal(size=val.shape)) @registry.register def delayedsphere(x: np.ndarray) -> float:", "= 0.0 thirdSum = 0.0 for i in range(problemDimensions): firstSum += (x[i]-mu1)**2 secondSum", "integration, tested in optim because why not.\"\"\" return -float(genzcornerpeak(y)) @registry.register def genzgaussianpeakintegral(x: np.ndarray)", "optima, as we get closer to the optimum.\"\"\" assert len(x) >= 2 distance", "root directory of this source tree. import time from typing import Dict, Any,", "onemax5(y: np.ndarray) -> float: \"\"\"Softmax discretization of onemax with 5 possibles values. This", "- 1.0) / s)) firstSum = 0.0 secondSum = 0.0 thirdSum = 0.0", "0 0]) = 1. \"\"\" for i, x_ in enumerate(list(x)): if int(round(x_)) !=", "Bayesian optimization.\"\"\" part1 = np.sum(x**2) part2 = np.prod(np.cos(x / np.sqrt(1 + np.arange(len(x))))) return", "return 1 + (float(part1)/4000.0) - float(part2) @registry.register def deceptiveillcond(x: np.ndarray) -> float: \"\"\"An", "-> float: \"\"\"Softmax discretization of onemax (This multiplies the dimension by 2).\"\"\" return", "invariant to the order of variables.\"\"\" return float(x[-1]**2 + 1000000. * np.sum(x[:-1]**2)) @registry.register", "optimization testbed. 
If you do not solve that one then you have a", "\"\"\"Softmax discretization of jump (This multiplies the dimension by 2).\"\"\" return _jump(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True)", "+ sphere(x)) @registry.register def hm(x: np.ndarray) -> float: \"\"\"New multimodal function (proposed for", "multiplies the dimension by 5.\"\"\" return _onemax(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def jump5(y: np.ndarray) ->", "(len(x) - cosi) + sphere(x)) @registry.register def hm(x: np.ndarray) -> float: \"\"\"New multimodal", "fail on this. The condition number increases to infinity as we get closer", "np.asarray(x) val = np.sum(np.power(x, 4) - 16 * np.power(x, 2) + 5 *", "many local optima, as we get closer to the optimum.\"\"\" assert len(x) >=", "\"\"\"Softmax discretization of jump with 5 possibles values. This multiplies the dimension by", "@registry.register_with_info(no_transfrom=True) def hardjump5(y: np.ndarray) -> float: \"\"\"Jump, with a discretization by 5 with", "@registry.register def st0(x: np.ndarray) -> float: \"\"\"Styblinksitang function with 0 noise.\"\"\" return _styblinksitang(x,", "ill conditioned function. The other classical example is ellipsoid. \"\"\" return float(x[0]**2 +", "firstSum = 0.0 secondSum = 0.0 thirdSum = 0.0 for i in range(problemDimensions):", "2).\"\"\" return _onemax(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def jump(y: np.ndarray) -> float: \"\"\"Softmax discretization of jump", ".1)) return float(np.max(dec)) @registry.register def sumdeceptive(x: np.ndarray) -> float: dec = 3 *", "len(x) - i return 0 def _jump(x: List[int]) -> float: # TODO: docstring?", "in integration, tested in optim because why not.\"\"\" return -float(np.exp(-sum(x**2 / 4.))) @registry.register", "descent does not succeed. Jumps are necessary. 
\"\"\" n = len(x) m =", "@registry.register def sphere(x: np.ndarray) -> float: \"\"\"The most classical continuous optimization testbed. If", "np.ndarray) -> float: \"\"\"Styblinksitang function with noise 10.\"\"\" return _styblinksitang(x, 10) @registry.register def", "-> float: \"\"\"Styblinksitang function with noise 1.\"\"\" return _styblinksitang(x, 1) @registry.register def st10(x:", "of bits. It just counts the number of 1, and returns len(x) -", "5 * x) # return a positive value for maximization return float(39.16599 *", "noisy optimization.\"\"\" x = np.asarray(x) val = np.sum(np.power(x, 4) - 16 * np.power(x,", "Registry[Callable[[np.ndarray], float]]() def _onemax(x: List[int]) -> float: \"\"\"onemax(x) is the most classical case", "= len(x) s = 1.0 - (1.0 / (2.0 * np.sqrt(problemDimensions + 20.0)", "- 2)**2 + .1)) return float(np.sum(dec)) @registry.register def altcigar(x: np.ndarray) -> float: \"\"\"Similar", "because why not.\"\"\" return -float(np.exp(-sum(x**2 / 4.))) @registry.register def slope(x: np.ndarray) -> float:", "2 by threshold 0 (>0 or <0).\"\"\" return _jump(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardleadingones(y: np.ndarray)", "4.))) @registry.register def minusgenzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of the Genz functions, originally", "but variables in inverse order. E.g. for pointing out algorithms not invariant to", "float: return sum(x) @registry.register def linear(x: np.ndarray) -> float: return float(np.tanh(x[0])) @registry.register def", "return 1. 
return float(distance) @registry.register def deceptivemultimodal(x: np.ndarray) -> float: \"\"\"Infinitely many local", "numpy as np from .utils import PostponedObject from ..instrumentation import discretization from ..common.decorators", "range(problemDimensions): firstSum += (x[i]-mu1)**2 secondSum += (x[i]-mu2)**2 thirdSum += 1.0 - np.cos(2*np.pi*(x[i]-mu1)) return", "1.0 - np.cos(2*np.pi*(x[i]-mu1)) return min(firstSum, 1.0*problemDimensions + secondSum)+10*thirdSum # following functions using discretization", "sphere2(x: np.ndarray) -> float: \"\"\"A bit more translated sphere function.\"\"\" return float(np.sum((x -", "order. E.g. for pointing out algorithms not invariant to the order of variables.\"\"\"", "0. else float(\"inf\")) @registry.register def deceptivepath(x: np.ndarray) -> float: \"\"\"A function which needs", "= np.asarray(x) val = np.sum(np.power(x, 4) - 16 * np.power(x, 2) + 5", "Rights Reserved. # # This source code is licensed under the MIT license", "return float(x[0]**2 + 1000000. * np.sum(x[1:]**2)) @registry.register def altellipsoid(y: np.ndarray) -> float: \"\"\"Similar", "infinity as we get closer to the optimum.\"\"\" assert len(x) >= 2 return", "/ 1000.) if x[0] != 0. else 0. registry.register(DelayedSphere()) @registry.register def sphere(x: np.ndarray)", "thresholds (quantiles of Gaussian).\"\"\" return _onemax(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardjump5(y: np.ndarray) -> float:", "- _onemax(x) if o == n or o <= n - m: return", "succeed. Jumps are necessary. \"\"\" n = len(x) m = n // 4", "len(x) - sum(1 if int(round(w)) == 1 else 0 for w in x)", "def sumdeceptive(x: np.ndarray) -> float: dec = 3 * x**2 - (2 /", "-> float: \"\"\"Leading ones, with a discretization in 2 by threshold 0 (>0", "4 o = n - _onemax(x) if o == n or o <=", "minimization. It is originally designed for lists of bits. It just counts the", "5 possibles values. 
This multiplies the dimension by 5.\"\"\" return _jump(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True)", "def delayedsphere(x: np.ndarray) -> float: '''For asynchronous experiments, we induce delays.''' time.sleep(abs(1./x[0]) /", "return float(distance) @registry.register def lunacek(x: np.ndarray) -> float: \"\"\"Multimodal function. Based on https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html.\"\"\"", "if np.abs(np.cos(invdistance) - angle) > 0.1: return 1. return float(distance) @registry.register def lunacek(x:", "return n - m - o return o # Deceptive part. def _styblinksitang(x:", "\"\"\"onemax(x) is the most classical case of discrete functions, adapted to minimization. It", "5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def onemax(y:", "1]) = 0, leadingones([1 0 0 0]) = 1. \"\"\" for i, x_", "= 4, leadingones([1 1 1 1]) = 0, leadingones([1 0 0 0]) =", "Most algorithms fail on this. The condition number increases to infinity as we", "positive value for maximization return float(39.16599 * len(x) + 1 * 0.5 *", "conditioned function. The other classical example is cigar. \"\"\" return sum((10**(6 * (i", "x[1]) if x[1] != 0. else np.pi / 2. invdistance = int(1. /", "experiments, we induce delays.''' time.sleep(abs(1./x[0]) / 100000. if x[0] != 0. else 0.)", "\"\"\"Infinitely many local optima, as we get closer to the optimum.\"\"\" assert len(x)", "1. \"\"\" for i, x_ in enumerate(list(x)): if int(round(x_)) != 1: return len(x)", "tested in optim because why not.\"\"\" return -float(np.exp(-sum(x**2 / 4.))) @registry.register def slope(x:", "5)) @registry.register_with_info(no_transfrom=True) def hardjump5(y: np.ndarray) -> float: \"\"\"Jump, with a discretization by 5", "x[1] != 0. else np.pi / 2. invdistance = int(1. 
/ distance) if", "ones, with a discretization in 2 by threshold 0 (>0 or <0).\"\"\" return", "@registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray) -> float: \"\"\"Onemax, with a discretization in 2 by", "else. \"\"\" return len(x) - sum(1 if int(round(w)) == 1 else 0 for", "(quantiles of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def onemax(y: np.ndarray) -> float: \"\"\"Softmax", "the optimum.\"\"\" assert len(x) >= 2 return float(max(np.abs(np.arctan(x[1]/x[0])), np.sqrt(x[0]**2. + x[1]**2.), 1. if", "in 2 by threshold 0 (>0 or <0).\"\"\" return _jump(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardleadingones(y:", "_onemax(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardjump5(y: np.ndarray) -> float: \"\"\"Jump, with a discretization by", "0 (>0 or <0).\"\"\" return _onemax(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardjump(y: np.ndarray) -> float: \"\"\"Hardjump,", "np.ndarray) -> float: \"\"\"Classical multimodal function.\"\"\" cosi = float(np.sum(np.cos(2 * np.pi * x)))", "are in minimization. The principle of a jump function is that local descent", "jump (This multiplies the dimension by 2).\"\"\" return _jump(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def leadingones(y: np.ndarray)", "number increases to infinity as we get closer to the optimum.\"\"\" assert len(x)", "= args[0] return float(abs(1./x[0]) / 1000.) if x[0] != 0. else 0. 
registry.register(DelayedSphere())", "= 0.0 for i in range(problemDimensions): firstSum += (x[i]-mu1)**2 secondSum += (x[i]-mu2)**2 thirdSum", "threshold 0 (>0 or <0).\"\"\" return _onemax(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardjump(y: np.ndarray) -> float:", "return float(np.exp(-np.sum(x**2 / 4.))) @registry.register def minusgenzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of the", "def st10(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 10.\"\"\" return _styblinksitang(x, 10)", "* (x[i]**2) for i in range(len(x))) @registry.register def ellipsoid(x: np.ndarray) -> float: \"\"\"Classical", "\"\"\" return float(x[0]**2 + 1000000. * np.sum(x[1:]**2)) @registry.register def altellipsoid(y: np.ndarray) -> float:", "float(x[-1]**2 + 1000000. * np.sum(x[:-1]**2)) @registry.register def cigar(x: np.ndarray) -> float: \"\"\"Classical example", "* np.random.normal(size=val.shape)) @registry.register def delayedsphere(x: np.ndarray) -> float: '''For asynchronous experiments, we induce", "__call__(self, x: np.ndarray) -> float: return float(np.sum(x**2)) def get_postponing_delay(self, args: Tuple[Any, ...], kwargs:", "possibles values. This multiplies the dimension by 5.\"\"\" return _jump(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def", "@registry.register def delayedsphere(x: np.ndarray) -> float: '''For asynchronous experiments, we induce delays.''' time.sleep(abs(1./x[0])", "\"\"\"Classical multimodal function.\"\"\" cosi = float(np.sum(np.cos(2 * np.pi * x))) return float(10 *", "the Genz functions, originally used in integration, tested in optim because why not.\"\"\"", "!= 1: return len(x) - i return 0 def _jump(x: List[int]) -> float:", "inverse order. E.g. for pointing out algorithms not invariant to the order of", "file in the root directory of this source tree. 
import time from typing", "4 thresholds (quantiles of Gaussian).\"\"\" return _onemax(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardjump5(y: np.ndarray) ->", "source tree. import time from typing import Dict, Any, Tuple, List, Callable import", "the dimension by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def onemax5(y: np.ndarray) -> float: \"\"\"Softmax", "range(len(x))) @registry.register def rastrigin(x: np.ndarray) -> float: \"\"\"Classical multimodal function.\"\"\" cosi = float(np.sum(np.cos(2", "float: return float(np.sum(x**2)) def get_postponing_delay(self, args: Tuple[Any, ...], kwargs: Dict[str, Any], value: float)", "np.abs(np.cos(invdistance) - angle) > 0.1: return 1. return float(distance) @registry.register def lunacek(x: np.ndarray)", "1. if x[0] > 0 else 0.) if x[0] != 0. else float(\"inf\"))", "functions, originally used in integration, tested in optim because why not.\"\"\" return float(np.exp(-np.sum(x**2", "/ 4.))) @registry.register def slope(x: np.ndarray) -> float: return sum(x) @registry.register def linear(x:", "-> float: return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0) @registry.register def griewank(x:", "Returns len(x) - number of initial 1. I.e. leadingones([0 1 1 1]) =", "that local descent does not succeed. Jumps are necessary. \"\"\" n = len(x)", "function.\"\"\" return float(np.sum((x - 2.)**2)) @registry.register def sphere4(x: np.ndarray) -> float: \"\"\"Even more", "@registry.register def sphere1(x: np.ndarray) -> float: \"\"\"Translated sphere function.\"\"\" return float(np.sum((x - 1.)**2))", "\"\"\"Softmax discretization of leadingones (This multiplies the dimension by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True)", "return 0. angle = np.arctan(x[0] / x[1]) if x[1] != 0. else np.pi", "o # Deceptive part. 
def _styblinksitang(x: np.ndarray, noise: float) -> float: \"\"\"Classical function", "jump function is that local descent does not succeed. Jumps are necessary. \"\"\"", "def sphere2(x: np.ndarray) -> float: \"\"\"A bit more translated sphere function.\"\"\" return float(np.sum((x", "invdistance = (1. / distance) if distance > 0. else 0. if np.abs(np.cos(invdistance)", "- 1) @registry.register_with_info(no_transfrom=True) def minusgenzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the Genz functions,", "- angle) > 0.1: return 1. return float(distance) @registry.register def lunacek(x: np.ndarray) ->", "1.0) / s)) firstSum = 0.0 secondSum = 0.0 thirdSum = 0.0 for", "return float(10 * (len(x) - cosi) + sphere(x)) @registry.register def hm(x: np.ndarray) ->", "np.sum(x[:-1]**2)) @registry.register def cigar(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned function.", "\"\"\"New multimodal function (proposed for Nevergrad).\"\"\" return float(np.sum((x**2) * (1.1 + np.cos(1. /", "other classical example is ellipsoid. \"\"\" return float(x[0]**2 + 1000000. * np.sum(x[1:]**2)) @registry.register", "!= 0. else 0. registry.register(DelayedSphere()) @registry.register def sphere(x: np.ndarray) -> float: \"\"\"The most", "@registry.register def sphere2(x: np.ndarray) -> float: \"\"\"A bit more translated sphere function.\"\"\" return", "discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True)", "part. def _styblinksitang(x: np.ndarray, noise: float) -> float: \"\"\"Classical function for testing noisy", "0 everywhere else. \"\"\" return len(x) - sum(1 if int(round(w)) == 1 else", "np.sqrt(problemDimensions + 20.0) - 8.2)) mu1 = 2.5 mu2 = - np.sqrt(abs((mu1**2 -", "(proposed for Nevergrad).\"\"\" return float(np.sum((x**2) * (1.1 + np.cos(1. 
/ x)))) @registry.register def", "source code is licensed under the MIT license found in the # LICENSE", "of variables.\"\"\" x = y[::-1] return sum((10**(6 * (i - 1) / float(len(x)", "return -float(genzcornerpeak(y)) @registry.register def genzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of the Genz functions,", "- angle) > 0.1: return 1. return float(distance) @registry.register def deceptivemultimodal(x: np.ndarray) ->", "-> float: \"\"\"Even more translated sphere function.\"\"\" return float(np.sum((x - 4.)**2)) @registry.register def", "ones.. It also works in the continuous case but in that cases discretizes", "np.ndarray) -> float: \"\"\"Even more translated sphere function.\"\"\" return float(np.sum((x - 4.)**2)) @registry.register", "algorithms fail on this. The path becomes thiner as we get closer to", "tested in optim because why not.\"\"\" return float(np.exp(-np.sum(x**2 / 4.))) @registry.register def minusgenzgaussianpeakintegral(x:", "+ x[1]**2) if distance == 0.: return 0. angle = np.arctan(x[0] / x[1])", "float: \"\"\"Styblinksitang function with noise 1.\"\"\" return _styblinksitang(x, 1) @registry.register def st10(x: np.ndarray)", "hardjump(y: np.ndarray) -> float: \"\"\"Hardjump, with a discretization in 2 by threshold 0", "closer to the optimum.\"\"\" assert len(x) >= 2 return float(max(np.abs(np.arctan(x[1]/x[0])), np.sqrt(x[0]**2. + x[1]**2.),", "for i, x_ in enumerate(list(x)): if int(round(x_)) != 1: return len(x) - i", "0.1: return 1. return float(distance) @registry.register def lunacek(x: np.ndarray) -> float: \"\"\"Multimodal function.", "return float(distance) @registry.register def deceptivemultimodal(x: np.ndarray) -> float: \"\"\"Infinitely many local optima, as", "or <0).\"\"\" return _jump(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardleadingones(y: np.ndarray) -> float: \"\"\"Leading ones, with", "ill conditioned functions. Most algorithms fail on this. 
The condition number increases to", "def st1(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 1.\"\"\" return _styblinksitang(x, 1)", "/ x[1]) if x[1] != 0. else np.pi / 2. invdistance = int(1.", "functions; we are in minimization. The principle of a jump function is that", "to the optimum.\"\"\" assert len(x) >= 2 distance = np.sqrt(x[0]**2 + x[1]**2) if", "the dimension by 5.\"\"\" return _jump(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def leadingones5(y: np.ndarray) -> float:", "8.2)) mu1 = 2.5 mu2 = - np.sqrt(abs((mu1**2 - 1.0) / s)) firstSum", "-> float: \"\"\"onemax(x) is the most classical case of discrete functions, adapted to", "float: \"\"\"Similar to cigar, but variables in inverse order. E.g. for pointing out", "assert len(x) >= 2 distance = np.sqrt(x[0]**2 + x[1]**2) if distance == 0.:", "Genz functions, originally used in integration, tested in optim because why not.\"\"\" value", "sphere function.\"\"\" return float(np.sum((x - 4.)**2)) @registry.register def maxdeceptive(x: np.ndarray) -> float: dec", "np.cos(1. / x)))) @registry.register def rosenbrock(x: np.ndarray) -> float: return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0", "functions. Most algorithms fail on this. 
The condition number increases to infinity as", "with 0 noise.\"\"\" return _styblinksitang(x, 0) @registry.register def st1(x: np.ndarray) -> float: \"\"\"Styblinksitang", "return float(np.max(dec)) @registry.register def sumdeceptive(x: np.ndarray) -> float: dec = 3 * x**2", "tested in optim because why not.\"\"\" value = float(1 + np.mean(np.tanh(y))) if value", "0) @registry.register def st1(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 1.\"\"\" return", "This source code is licensed under the MIT license found in the #", "import Dict, Any, Tuple, List, Callable import numpy as np from .utils import", "x[:-1])**2.0) @registry.register def griewank(x: np.ndarray) -> float: \"\"\"Multimodal function, often used in Bayesian", "float(len(x) - 1))) * (x[i]**2) for i in range(len(x))) @registry.register def ellipsoid(x: np.ndarray)", "in minimization. The principle of a jump function is that local descent does", "of onemax with 5 possibles values. This multiplies the dimension by 5.\"\"\" return", "discrete functions, adapted to minimization. It is originally designed for lists of bits.", "-> float: \"\"\"Infinitely many local optima, as we get closer to the optimum.\"\"\"", "1) @registry.register def st10(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 10.\"\"\" return", "float(10 * (len(x) - cosi) + sphere(x)) @registry.register def hm(x: np.ndarray) -> float:", "0.0 for i in range(problemDimensions): firstSum += (x[i]-mu1)**2 secondSum += (x[i]-mu2)**2 thirdSum +=", "by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _onemax(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def", "for i in range(len(x))) @registry.register def ellipsoid(x: np.ndarray) -> float: \"\"\"Classical example of", "x[1] != 0. else np.pi / 2. invdistance = (1. / distance) if", "np.ndarray) -> float: \"\"\"Multimodal function. 
Based on https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html.\"\"\" problemDimensions = len(x) s =", "in range(len(x))) @registry.register def ellipsoid(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned", "* (i - 1) / float(len(x) - 1))) * (x[i]**2) for i in", "-> float: \"\"\"One of the Genz functions, originally used in integration, tested in", "def st100(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 100.\"\"\" return _styblinksitang(x, 100)", "x)))) @registry.register def rosenbrock(x: np.ndarray) -> float: return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1", "return _onemax(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def jump5(y: np.ndarray) -> float: \"\"\"Softmax discretization of jump", "def linear(x: np.ndarray) -> float: return float(np.tanh(x[0])) @registry.register def st0(x: np.ndarray) -> float:", "-> float: \"\"\"New multimodal function (proposed for Nevergrad).\"\"\" return float(np.sum((x**2) * (1.1 +", "input domain by ]0.5,1.5] --> 1 and 0 everywhere else. \"\"\" return len(x)", "(>0 or <0).\"\"\" return _onemax(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardjump(y: np.ndarray) -> float: \"\"\"Hardjump, with", "float: \"\"\"Hardjump, with a discretization in 2 by threshold 0 (>0 or <0).\"\"\"", "== 0: return float(\"inf\") return value**(-len(y) - 1) @registry.register_with_info(no_transfrom=True) def minusgenzcornerpeak(y: np.ndarray) ->", "def deceptivemultimodal(x: np.ndarray) -> float: \"\"\"Infinitely many local optima, as we get closer", "float: \"\"\"Infinitely many local optima, as we get closer to the optimum.\"\"\" assert", "the optimum.\"\"\" assert len(x) >= 2 distance = np.sqrt(x[0]**2 + x[1]**2) if distance", "= np.prod(np.cos(x / np.sqrt(1 + np.arange(len(x))))) return 1 + (float(part1)/4000.0) - float(part2) @registry.register", "the root directory of this source tree. 
import time from typing import Dict,", "DelayedSphere(PostponedObject): def __call__(self, x: np.ndarray) -> float: return float(np.sum(x**2)) def get_postponing_delay(self, args: Tuple[Any,", "import numpy as np from .utils import PostponedObject from ..instrumentation import discretization from", "- i return 0 def _jump(x: List[int]) -> float: # TODO: docstring? \"\"\"There", "originally used in integration, tested in optim because why not.\"\"\" return float(np.exp(-np.sum(x**2 /", "_leadingones(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def onemax(y: np.ndarray) -> float: \"\"\"Softmax discretization of onemax (This", "float]]() def _onemax(x: List[int]) -> float: \"\"\"onemax(x) is the most classical case of", "1]) = 4, leadingones([1 1 1 1]) = 0, leadingones([1 0 0 0])", "np.ndarray) -> float: \"\"\"Styblinksitang function with noise 1.\"\"\" return _styblinksitang(x, 1) @registry.register def", "translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray) -> float: \"\"\"Onemax, with a discretization in 2", "functions, adapted to minimization. It is originally designed for lists of bits. It", "np.pi / 2. invdistance = (1. / distance) if distance > 0. else", "on this. The condition number increases to infinity as we get closer to", "invariant to the order of variables.\"\"\" x = y[::-1] return sum((10**(6 * (i", "with 5 possibles values. This multiplies the dimension by 5.\"\"\" return _jump(discretization.softmax_discretization(y, 5))", "minimization. The principle of a jump function is that local descent does not", "<0).\"\"\" return _leadingones(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardonemax5(y: np.ndarray) -> float: \"\"\"Hardonemax, with a discretization", "def leadingones5(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones with 5 possibles values.", "!= 0. else np.pi / 2. 
invdistance = (1. / distance) if distance", "\"\"\"Classical example of ill conditioned function. The other classical example is ellipsoid. \"\"\"", "100000. if x[0] != 0. else 0.) return float(np.sum(x**2)) class DelayedSphere(PostponedObject): def __call__(self,", "continuous optimization testbed. If you do not solve that one then you have", "cigar, but variables in inverse order. E.g. for pointing out algorithms not invariant", "more translated sphere function.\"\"\" return float(np.sum((x - 4.)**2)) @registry.register def maxdeceptive(x: np.ndarray) ->", "return _styblinksitang(x, 1) @registry.register def st10(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise", "float(part2) @registry.register def deceptiveillcond(x: np.ndarray) -> float: \"\"\"An extreme ill conditioned functions. Most", "minimization. Returns len(x) - number of initial 1. I.e. leadingones([0 1 1 1])", "_onemax(x) if o == n or o <= n - m: return n", "function.\"\"\" return float(np.sum((x - 1.)**2)) @registry.register def sphere2(x: np.ndarray) -> float: \"\"\"A bit", "-> float: \"\"\"Classical multimodal function.\"\"\" cosi = float(np.sum(np.cos(2 * np.pi * x))) return", "int(1. / distance) if distance > 0. else 0. if np.abs(np.cos(invdistance) - angle)", "n // 4 o = n - _onemax(x) if o == n or", "float: \"\"\"leadingones is the second most classical discrete function, adapted for minimization. Returns", "dec = 3 * x**2 - (2 / (3**(x - 2)**2 + .1))", "algorithms fail on this. The condition number increases to infinity as we get", "1. return float(distance) @registry.register def lunacek(x: np.ndarray) -> float: \"\"\"Multimodal function. Based on", "are necessary. 
\"\"\" n = len(x) m = n // 4 o =", "range(len(x))) @registry.register def ellipsoid(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned function.", "/ (3**(x - 2)**2 + .1)) return float(np.sum(dec)) @registry.register def altcigar(x: np.ndarray) ->", "float: x = args[0] return float(abs(1./x[0]) / 1000.) if x[0] != 0. else", "-> float: \"\"\"Similar to Ellipsoid, but variables in inverse order. E.g. for pointing", "# This source code is licensed under the MIT license found in the", "- 1))) * (x[i]**2) for i in range(len(x))) @registry.register def rastrigin(x: np.ndarray) ->", "i in range(len(x))) @registry.register def ellipsoid(x: np.ndarray) -> float: \"\"\"Classical example of ill", "Callable import numpy as np from .utils import PostponedObject from ..instrumentation import discretization", "= float(1 + np.mean(np.tanh(y))) if value == 0: return float(\"inf\") return value**(-len(y) -", "float: \"\"\"Softmax discretization of onemax (This multiplies the dimension by 2).\"\"\" return _onemax(discretization.softmax_discretization(y))", "\"\"\"An extreme ill conditioned functions. Most algorithms fail on this. The condition number", "import discretization from ..common.decorators import Registry registry = Registry[Callable[[np.ndarray], float]]() def _onemax(x: List[int])", "@registry.register def rosenbrock(x: np.ndarray) -> float: return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 -", "0. else np.pi / 2. invdistance = int(1. / distance) if distance >", ">= 2 distance = np.sqrt(x[0]**2 + x[1]**2) if distance == 0.: return 0.", "hardleadingones(y: np.ndarray) -> float: \"\"\"Leading ones, with a discretization in 2 by threshold", "variables.\"\"\" x = y[::-1] return sum((10**(6 * (i - 1) / float(len(x) -", "n - _onemax(x) if o == n or o <= n - m:", "def ellipsoid(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned function. 
The other", "The principle of a jump function is that local descent does not succeed.", "@registry.register_with_info(no_transfrom=True) def hardjump(y: np.ndarray) -> float: \"\"\"Hardjump, with a discretization in 2 by", "else float(\"inf\")) @registry.register def deceptivepath(x: np.ndarray) -> float: \"\"\"A function which needs following", "/ x[1]) if x[1] != 0. else np.pi / 2. invdistance = (1.", "_styblinksitang(x, 10) @registry.register def st100(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 100.\"\"\"", "float: \"\"\"Jump, with a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\"", "float: '''For asynchronous experiments, we induce delays.''' time.sleep(abs(1./x[0]) / 100000. if x[0] !=", "multiplies the dimension by 2).\"\"\" return _jump(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def leadingones(y: np.ndarray) -> float:", "return -float(np.exp(-sum(x**2 / 4.))) @registry.register def slope(x: np.ndarray) -> float: return sum(x) @registry.register", "= 0, leadingones([1 0 0 0]) = 1. \"\"\" for i, x_ in", "found in the # LICENSE file in the root directory of this source", "of Gaussian).\"\"\" return _onemax(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardjump5(y: np.ndarray) -> float: \"\"\"Jump, with", "firstSum += (x[i]-mu1)**2 secondSum += (x[i]-mu2)**2 thirdSum += 1.0 - np.cos(2*np.pi*(x[i]-mu1)) return min(firstSum,", "def rastrigin(x: np.ndarray) -> float: \"\"\"Classical multimodal function.\"\"\" cosi = float(np.sum(np.cos(2 * np.pi", "values. 
This multiplies the dimension by 5.\"\"\" return _leadingones(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def genzcornerpeak(y:", "List[int]) -> float: \"\"\"onemax(x) is the most classical case of discrete functions, adapted", "discretization should not be used with translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray) -> float:", "val = np.sum(np.power(x, 4) - 16 * np.power(x, 2) + 5 * x)", "20.0) - 8.2)) mu1 = 2.5 mu2 = - np.sqrt(abs((mu1**2 - 1.0) /", "MIT license found in the # LICENSE file in the root directory of", "assert len(x) >= 2 return float(max(np.abs(np.arctan(x[1]/x[0])), np.sqrt(x[0]**2. + x[1]**2.), 1. if x[0] >", "angle) > 0.1: return 1. return float(distance) @registry.register def deceptivemultimodal(x: np.ndarray) -> float:", "function with noise 1.\"\"\" return _styblinksitang(x, 1) @registry.register def st10(x: np.ndarray) -> float:", "multimodal function (proposed for Nevergrad).\"\"\" return float(np.sum((x**2) * (1.1 + np.cos(1. / x))))", "increases to infinity as we get closer to the optimum.\"\"\" assert len(x) >=", "@registry.register_with_info(no_transfrom=True) def hardleadingones(y: np.ndarray) -> float: \"\"\"Leading ones, with a discretization in 2", "its affiliates. All Rights Reserved. 
# # This source code is licensed under", "hm(x: np.ndarray) -> float: \"\"\"New multimodal function (proposed for Nevergrad).\"\"\" return float(np.sum((x**2) *", "return _jump(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardleadingones5(y: np.ndarray) -> float: \"\"\"Leadingones, with a discretization", "PostponedObject from ..instrumentation import discretization from ..common.decorators import Registry registry = Registry[Callable[[np.ndarray], float]]()", "List, Callable import numpy as np from .utils import PostponedObject from ..instrumentation import", "that one then you have a bug.\"\"\" return float(np.sum(x**2)) @registry.register def sphere1(x: np.ndarray)", "becomes thiner as we get closer to the optimum.\"\"\" assert len(x) >= 2", "- o return o # Deceptive part. def _styblinksitang(x: np.ndarray, noise: float) ->", "to infinity as we get closer to the optimum.\"\"\" assert len(x) >= 2", "if x[1] != 0. else np.pi / 2. invdistance = (1. / distance)", "sphere(x: np.ndarray) -> float: \"\"\"The most classical continuous optimization testbed. 
If you do", "translated sphere function.\"\"\" return float(np.sum((x - 2.)**2)) @registry.register def sphere4(x: np.ndarray) -> float:", "return float(np.sum(x**2)) @registry.register def sphere1(x: np.ndarray) -> float: \"\"\"Translated sphere function.\"\"\" return float(np.sum((x", "dimension by 2).\"\"\" return _onemax(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def jump(y: np.ndarray) -> float: \"\"\"Softmax discretization", "5.\"\"\" return _jump(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def leadingones5(y: np.ndarray) -> float: \"\"\"Softmax discretization of", "2 by threshold 0 (>0 or <0).\"\"\" return _leadingones(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardonemax5(y: np.ndarray)", "1 1 1]) = 4, leadingones([1 1 1 1]) = 0, leadingones([1 0", "return len(x) - i return 0 def _jump(x: List[int]) -> float: # TODO:", "in the continuous case but in that cases discretizes the input domain by", "np.sqrt(abs((mu1**2 - 1.0) / s)) firstSum = 0.0 secondSum = 0.0 thirdSum =", "def deceptiveillcond(x: np.ndarray) -> float: \"\"\"An extreme ill conditioned functions. Most algorithms fail", "bit more translated sphere function.\"\"\" return float(np.sum((x - 2.)**2)) @registry.register def sphere4(x: np.ndarray)", "float: \"\"\"Softmax discretization of jump (This multiplies the dimension by 2).\"\"\" return _jump(discretization.softmax_discretization(y))", "originally used in integration, tested in optim because why not.\"\"\" return -float(np.exp(-sum(x**2 /", "of variables.\"\"\" return float(x[-1]**2 + 1000000. * np.sum(x[:-1]**2)) @registry.register def cigar(x: np.ndarray) ->", "= 2.5 mu2 = - np.sqrt(abs((mu1**2 - 1.0) / s)) firstSum = 0.0", "for pointing out algorithms not invariant to the order of variables.\"\"\" x =", "0. else 0. 
registry.register(DelayedSphere()) @registry.register def sphere(x: np.ndarray) -> float: \"\"\"The most classical", "lunacek(x: np.ndarray) -> float: \"\"\"Multimodal function. Based on https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html.\"\"\" problemDimensions = len(x) s", "def cigar(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned function. The other", "0 0 0]) = 1. \"\"\" for i, x_ in enumerate(list(x)): if int(round(x_))", "@registry.register def maxdeceptive(x: np.ndarray) -> float: dec = 3 * x**2 - (2", "by 5.\"\"\" return _jump(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def leadingones5(y: np.ndarray) -> float: \"\"\"Softmax discretization", "m = n // 4 o = n - _onemax(x) if o ==", "not invariant to the order of variables.\"\"\" x = y[::-1] return sum((10**(6 *", "leadingones (This multiplies the dimension by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def onemax5(y: np.ndarray)", "st10(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 10.\"\"\" return _styblinksitang(x, 10) @registry.register", "ill conditioned function. The other classical example is cigar. \"\"\" return sum((10**(6 *", "def genzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the Genz functions, originally used in", "float: \"\"\"onemax(x) is the most classical case of discrete functions, adapted to minimization.", "(1 - x[:-1])**2.0) @registry.register def griewank(x: np.ndarray) -> float: \"\"\"Multimodal function, often used", "!= 0. else 0.) 
return float(np.sum(x**2)) class DelayedSphere(PostponedObject): def __call__(self, x: np.ndarray) ->", "have a bug.\"\"\" return float(np.sum(x**2)) @registry.register def sphere1(x: np.ndarray) -> float: \"\"\"Translated sphere", "(x[i]-mu2)**2 thirdSum += 1.0 - np.cos(2*np.pi*(x[i]-mu1)) return min(firstSum, 1.0*problemDimensions + secondSum)+10*thirdSum # following", "to the order of variables.\"\"\" return float(x[-1]**2 + 1000000. * np.sum(x[:-1]**2)) @registry.register def", "used in integration, tested in optim because why not.\"\"\" return float(np.exp(-np.sum(x**2 / 4.)))", "- np.sqrt(abs((mu1**2 - 1.0) / s)) firstSum = 0.0 secondSum = 0.0 thirdSum", "thiner as we get closer to the optimum.\"\"\" assert len(x) >= 2 distance", "altcigar(x: np.ndarray) -> float: \"\"\"Similar to cigar, but variables in inverse order. E.g.", "= int(1. / distance) if distance > 0. else 0. if np.abs(np.cos(invdistance) -", "@registry.register_with_info(no_transfrom=True) def minusgenzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the Genz functions, originally used", "n - m - o return o # Deceptive part. def _styblinksitang(x: np.ndarray,", "0 for w in x) def _leadingones(x: List[int]) -> float: \"\"\"leadingones is the", "return _jump(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def leadingones(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones (This", "E.g. for pointing out algorithms not invariant to the order of variables.\"\"\" x", "]0.5,1.5] --> 1 and 0 everywhere else. \"\"\" return len(x) - sum(1 if", "and 0 everywhere else. \"\"\" return len(x) - sum(1 if int(round(w)) == 1", "@registry.register def deceptivemultimodal(x: np.ndarray) -> float: \"\"\"Infinitely many local optima, as we get", "not solve that one then you have a bug.\"\"\" return float(np.sum(x**2)) @registry.register def", "return 0 def _jump(x: List[int]) -> float: # TODO: docstring? 
\"\"\"There exists variants", "i, x_ in enumerate(list(x)): if int(round(x_)) != 1: return len(x) - i return", "float(\"inf\")) @registry.register def deceptivepath(x: np.ndarray) -> float: \"\"\"A function which needs following a", "leadingones with 5 possibles values. This multiplies the dimension by 5.\"\"\" return _leadingones(discretization.softmax_discretization(y,", "you have a bug.\"\"\" return float(np.sum(x**2)) @registry.register def sphere1(x: np.ndarray) -> float: \"\"\"Translated", "4.))) @registry.register def slope(x: np.ndarray) -> float: return sum(x) @registry.register def linear(x: np.ndarray)", "np.mean(np.tanh(y))) if value == 0: return float(\"inf\") return value**(-len(y) - 1) @registry.register_with_info(no_transfrom=True) def", "float: \"\"\"Softmax discretization of leadingones (This multiplies the dimension by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y))", "function for testing noisy optimization.\"\"\" x = np.asarray(x) val = np.sum(np.power(x, 4) -", "def hardjump(y: np.ndarray) -> float: \"\"\"Hardjump, with a discretization in 2 by threshold", "\"\"\"Similar to cigar, but variables in inverse order. E.g. for pointing out algorithms", "-> float: '''For asynchronous experiments, we induce delays.''' time.sleep(abs(1./x[0]) / 100000. if x[0]", "of leadingones (This multiplies the dimension by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def onemax5(y:", "not.\"\"\" return float(np.exp(-np.sum(x**2 / 4.))) @registry.register def minusgenzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of", "4) - 16 * np.power(x, 2) + 5 * x) # return a", "5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _onemax(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardjump5(y:", "\"\"\"A function which needs following a long path. 
Most algorithms fail on this.", "= Registry[Callable[[np.ndarray], float]]() def _onemax(x: List[int]) -> float: \"\"\"onemax(x) is the most classical", "2).\"\"\" return _jump(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def leadingones(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones", "return _styblinksitang(x, 10) @registry.register def st100(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise", "that cases discretizes the input domain by ]0.5,1.5] --> 1 and 0 everywhere", "Gaussian).\"\"\" return _onemax(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardjump5(y: np.ndarray) -> float: \"\"\"Jump, with a", "in that cases discretizes the input domain by ]0.5,1.5] --> 1 and 0", "discretization in 2 by threshold 0 (>0 or <0).\"\"\" return _onemax(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def", "optimization.\"\"\" part1 = np.sum(x**2) part2 = np.prod(np.cos(x / np.sqrt(1 + np.arange(len(x))))) return 1", "the most classical case of discrete functions, adapted to minimization. It is originally", "m - o return o # Deceptive part. def _styblinksitang(x: np.ndarray, noise: float)", "def hardleadingones(y: np.ndarray) -> float: \"\"\"Leading ones, with a discretization in 2 by", "with 4 thresholds (quantiles of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def onemax(y: np.ndarray)", "-float(np.exp(-sum(x**2 / 4.))) @registry.register def slope(x: np.ndarray) -> float: return sum(x) @registry.register def", "jump functions; we are in minimization. 
The principle of a jump function is", "because why not.\"\"\" return -float(genzcornerpeak(y)) @registry.register def genzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of", "This multiplies the dimension by 5.\"\"\" return _jump(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def leadingones5(y: np.ndarray)", "solve that one then you have a bug.\"\"\" return float(np.sum(x**2)) @registry.register def sphere1(x:", "import time from typing import Dict, Any, Tuple, List, Callable import numpy as", "np.ndarray) -> float: \"\"\"Similar to cigar, but variables in inverse order. E.g. for", "It is originally designed for lists of bits. It just counts the number", "designed for lists of bits. It just counts the number of 1, and", "_onemax(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def jump5(y: np.ndarray) -> float: \"\"\"Softmax discretization of jump with", "All Rights Reserved. # # This source code is licensed under the MIT", "> 0 else 0.) if x[0] != 0. else float(\"inf\")) @registry.register def deceptivepath(x:", "return float(np.sum((x - 1.)**2)) @registry.register def sphere2(x: np.ndarray) -> float: \"\"\"A bit more", "possibles values. This multiplies the dimension by 5.\"\"\" return _onemax(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def", "np.ndarray) -> float: \"\"\"An extreme ill conditioned functions. Most algorithms fail on this.", "* (1.1 + np.cos(1. / x)))) @registry.register def rosenbrock(x: np.ndarray) -> float: return", "np.ndarray) -> float: dec = 3 * x**2 - (2 / (3**(x -", "-> float: \"\"\"A function which needs following a long path. Most algorithms fail", "# Deceptive part. def _styblinksitang(x: np.ndarray, noise: float) -> float: \"\"\"Classical function for", "induce delays.''' time.sleep(abs(1./x[0]) / 100000. if x[0] != 0. else 0.) 
return float(np.sum(x**2))", "leadingones(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones (This multiplies the dimension by", "> 0.1: return 1. return float(distance) @registry.register def deceptivemultimodal(x: np.ndarray) -> float: \"\"\"Infinitely", "0.: return 0. angle = np.arctan(x[0] / x[1]) if x[1] != 0. else", "0. registry.register(DelayedSphere()) @registry.register def sphere(x: np.ndarray) -> float: \"\"\"The most classical continuous optimization", "/ x)))) @registry.register def rosenbrock(x: np.ndarray) -> float: return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 +", "np.ndarray) -> float: \"\"\"Leading ones, with a discretization in 2 by threshold 0", "> 0. else 0. if np.abs(np.cos(invdistance) - angle) > 0.1: return 1. return", "-> float: \"\"\"Classical function for testing noisy optimization.\"\"\" x = np.asarray(x) val =", "following functions using discretization should not be used with translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y:", "the input domain by ]0.5,1.5] --> 1 and 0 everywhere else. \"\"\" return", "import Registry registry = Registry[Callable[[np.ndarray], float]]() def _onemax(x: List[int]) -> float: \"\"\"onemax(x) is", "\"\"\"Translated sphere function.\"\"\" return float(np.sum((x - 1.)**2)) @registry.register def sphere2(x: np.ndarray) -> float:", "def hardonemax5(y: np.ndarray) -> float: \"\"\"Hardonemax, with a discretization by 5 with 4", "for w in x) def _leadingones(x: List[int]) -> float: \"\"\"leadingones is the second", "function. The other classical example is ellipsoid. \"\"\" return float(x[0]**2 + 1000000. *", "\"\"\"One of the Genz functions, originally used in integration, tested in optim because", "tree. 
import time from typing import Dict, Any, Tuple, List, Callable import numpy", "noise.\"\"\" return _styblinksitang(x, 0) @registry.register def st1(x: np.ndarray) -> float: \"\"\"Styblinksitang function with", "return float(max(np.abs(np.arctan(x[1]/x[0])), np.sqrt(x[0]**2. + x[1]**2.), 1. if x[0] > 0 else 0.) if", "+ .1)) return float(np.sum(dec)) @registry.register def altcigar(x: np.ndarray) -> float: \"\"\"Similar to cigar,", "lists of bits. It just counts the number of 1, and returns len(x)", "3 * x**2 - (2 / (3**(x - 2)**2 + .1)) return float(np.max(dec))", "with 5 possibles values. This multiplies the dimension by 5.\"\"\" return _leadingones(discretization.softmax_discretization(y, 5))", "cigar. \"\"\" return sum((10**(6 * (i - 1) / float(len(x) - 1))) *", "+ .1)) return float(np.max(dec)) @registry.register def sumdeceptive(x: np.ndarray) -> float: dec = 3", "+ noise * np.random.normal(size=val.shape)) @registry.register def delayedsphere(x: np.ndarray) -> float: '''For asynchronous experiments,", "\"\"\"The most classical continuous optimization testbed. If you do not solve that one", "genzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the Genz functions, originally used in integration,", "by threshold 0 (>0 or <0).\"\"\" return _jump(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardleadingones(y: np.ndarray) ->", "(2 / (3**(x - 2)**2 + .1)) return float(np.sum(dec)) @registry.register def altcigar(x: np.ndarray)", "!= 0. 
else float(\"inf\")) @registry.register def deceptivepath(x: np.ndarray) -> float: \"\"\"A function which", "by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def", "* 0.5 * val + noise * np.random.normal(size=val.shape)) @registry.register def delayedsphere(x: np.ndarray) ->", "of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def onemax(y: np.ndarray) -> float: \"\"\"Softmax discretization", "int(round(w)) == 1 else 0 for w in x) def _leadingones(x: List[int]) ->", "def hardjump5(y: np.ndarray) -> float: \"\"\"Jump, with a discretization by 5 with 4", "by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def onemax5(y: np.ndarray) -> float: \"\"\"Softmax discretization of", "return float(\"inf\") return value**(-len(y) - 1) @registry.register_with_info(no_transfrom=True) def minusgenzcornerpeak(y: np.ndarray) -> float: \"\"\"One", "just counts the number of 1, and returns len(x) - number of ones..", "List[int]) -> float: \"\"\"leadingones is the second most classical discrete function, adapted for", "np.ndarray) -> float: return sum(x) @registry.register def linear(x: np.ndarray) -> float: return float(np.tanh(x[0]))", "/ 4.))) @registry.register def minusgenzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of the Genz functions,", "int(round(x_)) != 1: return len(x) - i return 0 def _jump(x: List[int]) ->", "@registry.register def deceptiveillcond(x: np.ndarray) -> float: \"\"\"An extreme ill conditioned functions. Most algorithms", "np.arctan(x[0] / x[1]) if x[1] != 0. else np.pi / 2. invdistance =", "onemax with 5 possibles values. 
This multiplies the dimension by 5.\"\"\" return _onemax(discretization.softmax_discretization(y,", "sum((10**(6 * (i - 1) / float(len(x) - 1))) * (x[i]**2) for i", "0. if np.abs(np.cos(invdistance) - angle) > 0.1: return 1. return float(distance) @registry.register def", "* x))) return float(10 * (len(x) - cosi) + sphere(x)) @registry.register def hm(x:", "following a long path. Most algorithms fail on this. The path becomes thiner", "why not.\"\"\" return -float(genzcornerpeak(y)) @registry.register def genzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of the", "most classical discrete function, adapted for minimization. Returns len(x) - number of initial", "used in integration, tested in optim because why not.\"\"\" return -float(np.exp(-sum(x**2 / 4.)))", "Dict, Any, Tuple, List, Callable import numpy as np from .utils import PostponedObject", "-> float: \"\"\"Softmax discretization of onemax with 5 possibles values. This multiplies the", "ellipsoid(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned function. The other classical", "local descent does not succeed. Jumps are necessary. \"\"\" n = len(x) m", "(quantiles of Gaussian).\"\"\" return _jump(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardleadingones5(y: np.ndarray) -> float: \"\"\"Leadingones,", "of ill conditioned function. The other classical example is cigar. \"\"\" return sum((10**(6", "-> float: \"\"\"Styblinksitang function with noise 10.\"\"\" return _styblinksitang(x, 10) @registry.register def st100(x:", "distance == 0.: return 0. angle = np.arctan(x[0] / x[1]) if x[1] !=", "= 1.0 - (1.0 / (2.0 * np.sqrt(problemDimensions + 20.0) - 8.2)) mu1", "return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0) @registry.register def griewank(x: np.ndarray) ->", "I.e. leadingones([0 1 1 1]) = 4, leadingones([1 1 1 1]) = 0,", "-> float: \"\"\"Multimodal function. 
Based on https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html.\"\"\" problemDimensions = len(x) s = 1.0", "+ 1 * 0.5 * val + noise * np.random.normal(size=val.shape)) @registry.register def delayedsphere(x:", "of ill conditioned function. The other classical example is ellipsoid. \"\"\" return float(x[0]**2", "1000.) if x[0] != 0. else 0. registry.register(DelayedSphere()) @registry.register def sphere(x: np.ndarray) ->", "algorithms not invariant to the order of variables.\"\"\" x = y[::-1] return sum((10**(6", "hardjump5(y: np.ndarray) -> float: \"\"\"Jump, with a discretization by 5 with 4 thresholds", "we induce delays.''' time.sleep(abs(1./x[0]) / 100000. if x[0] != 0. else 0.) return", "2 distance = np.sqrt(x[0]**2 + x[1]**2) if distance == 0.: return 0. angle", "float: \"\"\"One of the Genz functions, originally used in integration, tested in optim", "float(x[0]**2 + 1000000. * np.sum(x[1:]**2)) @registry.register def altellipsoid(y: np.ndarray) -> float: \"\"\"Similar to", "in x) def _leadingones(x: List[int]) -> float: \"\"\"leadingones is the second most classical", "_styblinksitang(x, 0) @registry.register def st1(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 1.\"\"\"", "@registry.register def altcigar(x: np.ndarray) -> float: \"\"\"Similar to cigar, but variables in inverse", "to cigar, but variables in inverse order. E.g. for pointing out algorithms not", "/ float(len(x) - 1))) * (x[i]**2) for i in range(len(x))) @registry.register def ellipsoid(x:", "function (proposed for Nevergrad).\"\"\" return float(np.sum((x**2) * (1.1 + np.cos(1. / x)))) @registry.register", "and its affiliates. All Rights Reserved. # # This source code is licensed", "in the # LICENSE file in the root directory of this source tree.", "--> 1 and 0 everywhere else. \"\"\" return len(x) - sum(1 if int(round(w))", "float: \"\"\"Softmax discretization of onemax with 5 possibles values. 
This multiplies the dimension", "-> float: \"\"\"Classical example of ill conditioned function. The other classical example is", "np.pi / 2. invdistance = int(1. / distance) if distance > 0. else", "= float(np.sum(np.cos(2 * np.pi * x))) return float(10 * (len(x) - cosi) +", "-> float: \"\"\"Softmax discretization of leadingones (This multiplies the dimension by 2).\"\"\" return", "1 + (float(part1)/4000.0) - float(part2) @registry.register def deceptiveillcond(x: np.ndarray) -> float: \"\"\"An extreme", "local optima, as we get closer to the optimum.\"\"\" assert len(x) >= 2", "discretization of onemax (This multiplies the dimension by 2).\"\"\" return _onemax(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def", "onemax(y: np.ndarray) -> float: \"\"\"Softmax discretization of onemax (This multiplies the dimension by", "number of ones.. It also works in the continuous case but in that", "float) -> float: \"\"\"Classical function for testing noisy optimization.\"\"\" x = np.asarray(x) val", "np.ndarray) -> float: \"\"\"Leadingones, with a discretization by 5 with 4 thresholds (quantiles", "pointing out algorithms not invariant to the order of variables.\"\"\" x = y[::-1]", "/ np.sqrt(1 + np.arange(len(x))))) return 1 + (float(part1)/4000.0) - float(part2) @registry.register def deceptiveillcond(x:", "by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _jump(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def", "len(x) - number of ones.. It also works in the continuous case but", "-> float: \"\"\"Similar to cigar, but variables in inverse order. E.g. for pointing", "originally designed for lists of bits. 
It just counts the number of 1,", "4 thresholds (quantiles of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def onemax(y: np.ndarray) ->", "return float(np.sum(dec)) @registry.register def altcigar(x: np.ndarray) -> float: \"\"\"Similar to cigar, but variables", "noise 1.\"\"\" return _styblinksitang(x, 1) @registry.register def st10(x: np.ndarray) -> float: \"\"\"Styblinksitang function", "1) @registry.register_with_info(no_transfrom=True) def minusgenzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the Genz functions, originally", "5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _jump(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardleadingones5(y:", "1))) * (x[i]**2) for i in range(len(x))) @registry.register def rastrigin(x: np.ndarray) -> float:", "-> float: return float(np.sum(x**2)) def get_postponing_delay(self, args: Tuple[Any, ...], kwargs: Dict[str, Any], value:", "a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _jump(discretization.threshold_discretization(y, 5))", "leadingones([1 1 1 1]) = 0, leadingones([1 0 0 0]) = 1. \"\"\"", "/ 2. invdistance = int(1. / distance) if distance > 0. else 0.", "np.ndarray) -> float: \"\"\"Classical example of ill conditioned function. 
The other classical example", "more translated sphere function.\"\"\" return float(np.sum((x - 2.)**2)) @registry.register def sphere4(x: np.ndarray) ->", "dimension by 5.\"\"\" return _onemax(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def jump5(y: np.ndarray) -> float: \"\"\"Softmax", "== n or o <= n - m: return n - m -", "args: Tuple[Any, ...], kwargs: Dict[str, Any], value: float) -> float: x = args[0]", "why not.\"\"\" return float(np.exp(-np.sum(x**2 / 4.))) @registry.register def minusgenzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One", "in inverse order. E.g. for pointing out algorithms not invariant to the order", "not invariant to the order of variables.\"\"\" return float(x[-1]**2 + 1000000. * np.sum(x[:-1]**2))", "cases discretizes the input domain by ]0.5,1.5] --> 1 and 0 everywhere else.", "1000000. * np.sum(x[1:]**2)) @registry.register def altellipsoid(y: np.ndarray) -> float: \"\"\"Similar to Ellipsoid, but", "dimension by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def onemax5(y: np.ndarray) -> float: \"\"\"Softmax discretization", "_jump(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def leadingones(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones (This multiplies", "\"\"\" return len(x) - sum(1 if int(round(w)) == 1 else 0 for w", "@registry.register def st1(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 1.\"\"\" return _styblinksitang(x,", "as we get closer to the optimum.\"\"\" assert len(x) >= 2 distance =", "0.0 secondSum = 0.0 thirdSum = 0.0 for i in range(problemDimensions): firstSum +=", "return o # Deceptive part. 
def _styblinksitang(x: np.ndarray, noise: float) -> float: \"\"\"Classical", ".1)) return float(np.sum(dec)) @registry.register def altcigar(x: np.ndarray) -> float: \"\"\"Similar to cigar, but", "y[::-1] return sum((10**(6 * (i - 1) / float(len(x) - 1))) * (x[i]**2)", "angle) > 0.1: return 1. return float(distance) @registry.register def lunacek(x: np.ndarray) -> float:", "we are in minimization. The principle of a jump function is that local", "\"\"\" n = len(x) m = n // 4 o = n -", "x[0] != 0. else 0.) return float(np.sum(x**2)) class DelayedSphere(PostponedObject): def __call__(self, x: np.ndarray)", "sum(1 if int(round(w)) == 1 else 0 for w in x) def _leadingones(x:", "float: return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0) @registry.register def griewank(x: np.ndarray)", "* len(x) + 1 * 0.5 * val + noise * np.random.normal(size=val.shape)) @registry.register", "= n - _onemax(x) if o == n or o <= n -", "(>0 or <0).\"\"\" return _jump(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardleadingones(y: np.ndarray) -> float: \"\"\"Leading ones,", "args[0] return float(abs(1./x[0]) / 1000.) if x[0] != 0. else 0. registry.register(DelayedSphere()) @registry.register", "initial 1. I.e. 
leadingones([0 1 1 1]) = 4, leadingones([1 1 1 1])", "2)**2 + .1)) return float(np.sum(dec)) @registry.register def altcigar(x: np.ndarray) -> float: \"\"\"Similar to", "_leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def onemax5(y: np.ndarray) -> float: \"\"\"Softmax discretization of onemax with 5", "function.\"\"\" cosi = float(np.sum(np.cos(2 * np.pi * x))) return float(10 * (len(x) -", "float: \"\"\"Styblinksitang function with 0 noise.\"\"\" return _styblinksitang(x, 0) @registry.register def st1(x: np.ndarray)", "or o <= n - m: return n - m - o return", "x**2 - (2 / (3**(x - 2)**2 + .1)) return float(np.sum(dec)) @registry.register def", "5)) @registry.register_with_info(no_transfrom=True) def hardleadingones5(y: np.ndarray) -> float: \"\"\"Leadingones, with a discretization by 5", "sphere function.\"\"\" return float(np.sum((x - 2.)**2)) @registry.register def sphere4(x: np.ndarray) -> float: \"\"\"Even", "\"\"\"Softmax discretization of leadingones with 5 possibles values. This multiplies the dimension by", "0.) if x[0] != 0. else float(\"inf\")) @registry.register def deceptivepath(x: np.ndarray) -> float:", "Facebook, Inc. and its affiliates. All Rights Reserved. # # This source code", "\"\"\"Multimodal function. Based on https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html.\"\"\" problemDimensions = len(x) s = 1.0 - (1.0", "np.ndarray) -> float: '''For asynchronous experiments, we induce delays.''' time.sleep(abs(1./x[0]) / 100000. if", "for i in range(len(x))) @registry.register def rastrigin(x: np.ndarray) -> float: \"\"\"Classical multimodal function.\"\"\"", "does not succeed. Jumps are necessary. \"\"\" n = len(x) m = n", "!= 0. else np.pi / 2. invdistance = int(1. / distance) if distance", "noise * np.random.normal(size=val.shape)) @registry.register def delayedsphere(x: np.ndarray) -> float: '''For asynchronous experiments, we", "everywhere else. 
\"\"\" return len(x) - sum(1 if int(round(w)) == 1 else 0", "if distance == 0.: return 0. angle = np.arctan(x[0] / x[1]) if x[1]", "function.\"\"\" return float(np.sum((x - 4.)**2)) @registry.register def maxdeceptive(x: np.ndarray) -> float: dec =", "maxdeceptive(x: np.ndarray) -> float: dec = 3 * x**2 - (2 / (3**(x", "or <0).\"\"\" return _leadingones(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardonemax5(y: np.ndarray) -> float: \"\"\"Hardonemax, with a", "* (x[i]**2) for i in range(len(x))) @registry.register def rastrigin(x: np.ndarray) -> float: \"\"\"Classical", "example is cigar. \"\"\" return sum((10**(6 * (i - 1) / float(len(x) -", "exists variants of jump functions; we are in minimization. The principle of a", "of ones.. It also works in the continuous case but in that cases", "you do not solve that one then you have a bug.\"\"\" return float(np.sum(x**2))", "function with 0 noise.\"\"\" return _styblinksitang(x, 0) @registry.register def st1(x: np.ndarray) -> float:", "= np.sum(np.power(x, 4) - 16 * np.power(x, 2) + 5 * x) #", "from .utils import PostponedObject from ..instrumentation import discretization from ..common.decorators import Registry registry", "# TODO: docstring? \"\"\"There exists variants of jump functions; we are in minimization.", ".utils import PostponedObject from ..instrumentation import discretization from ..common.decorators import Registry registry =", "in integration, tested in optim because why not.\"\"\" value = float(1 + np.mean(np.tanh(y)))", "-> float: \"\"\"The most classical continuous optimization testbed. If you do not solve", "-> float: \"\"\"leadingones is the second most classical discrete function, adapted for minimization.", "@registry.register def ellipsoid(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned function. The", "= y[::-1] return sum((10**(6 * (i - 1) / float(len(x) - 1))) *", "by ]0.5,1.5] --> 1 and 0 everywhere else. 
\"\"\" return len(x) - sum(1", "multiplies the dimension by 5.\"\"\" return _jump(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def leadingones5(y: np.ndarray) ->", "1.0*problemDimensions + secondSum)+10*thirdSum # following functions using discretization should not be used with", "for i in range(problemDimensions): firstSum += (x[i]-mu1)**2 secondSum += (x[i]-mu2)**2 thirdSum += 1.0", "time.sleep(abs(1./x[0]) / 100000. if x[0] != 0. else 0.) return float(np.sum(x**2)) class DelayedSphere(PostponedObject):", "in 2 by threshold 0 (>0 or <0).\"\"\" return _leadingones(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardonemax5(y:", "1.\"\"\" return _styblinksitang(x, 1) @registry.register def st10(x: np.ndarray) -> float: \"\"\"Styblinksitang function with", "fail on this. The path becomes thiner as we get closer to the", "translated sphere function.\"\"\" return float(np.sum((x - 4.)**2)) @registry.register def maxdeceptive(x: np.ndarray) -> float:", "= n // 4 o = n - _onemax(x) if o == n", "* x**2 - (2 / (3**(x - 2)**2 + .1)) return float(np.max(dec)) @registry.register", "one then you have a bug.\"\"\" return float(np.sum(x**2)) @registry.register def sphere1(x: np.ndarray) ->", "np.sum(np.power(x, 4) - 16 * np.power(x, 2) + 5 * x) # return", "-> float: dec = 3 * x**2 - (2 / (3**(x - 2)**2", "altellipsoid(y: np.ndarray) -> float: \"\"\"Similar to Ellipsoid, but variables in inverse order. E.g.", "def jump(y: np.ndarray) -> float: \"\"\"Softmax discretization of jump (This multiplies the dimension", "discretization of onemax with 5 possibles values. This multiplies the dimension by 5.\"\"\"", "thresholds (quantiles of Gaussian).\"\"\" return _leadingones(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def onemax(y: np.ndarray) -> float:", "discretization of jump with 5 possibles values. 
This multiplies the dimension by 5.\"\"\"", "Inc. and its affiliates. All Rights Reserved. # # This source code is", "np.ndarray) -> float: \"\"\"A function which needs following a long path. Most algorithms", "/ (3**(x - 2)**2 + .1)) return float(np.max(dec)) @registry.register def sumdeceptive(x: np.ndarray) ->", "-> float: \"\"\"Multimodal function, often used in Bayesian optimization.\"\"\" part1 = np.sum(x**2) part2", "def _jump(x: List[int]) -> float: # TODO: docstring? \"\"\"There exists variants of jump", "functions, originally used in integration, tested in optim because why not.\"\"\" return -float(np.exp(-sum(x**2", "= (1. / distance) if distance > 0. else 0. if np.abs(np.cos(invdistance) -", "len(x) >= 2 distance = np.sqrt(x[0]**2 + x[1]**2) if distance == 0.: return", "\"\"\"Hardonemax, with a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return", "else 0. if np.abs(np.cos(invdistance) - angle) > 0.1: return 1. return float(distance) @registry.register", "with 4 thresholds (quantiles of Gaussian).\"\"\" return _jump(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardleadingones5(y: np.ndarray)", "(x[i]-mu1)**2 secondSum += (x[i]-mu2)**2 thirdSum += 1.0 - np.cos(2*np.pi*(x[i]-mu1)) return min(firstSum, 1.0*problemDimensions +", "\"\"\"There exists variants of jump functions; we are in minimization. The principle of", "return _leadingones(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def genzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the Genz", "* x**2 - (2 / (3**(x - 2)**2 + .1)) return float(np.sum(dec)) @registry.register", "np.ndarray) -> float: \"\"\"The most classical continuous optimization testbed. 
If you do not", "10.\"\"\" return _styblinksitang(x, 10) @registry.register def st100(x: np.ndarray) -> float: \"\"\"Styblinksitang function with", "+ (float(part1)/4000.0) - float(part2) @registry.register def deceptiveillcond(x: np.ndarray) -> float: \"\"\"An extreme ill", "2. invdistance = (1. / distance) if distance > 0. else 0. if", "delayedsphere(x: np.ndarray) -> float: '''For asynchronous experiments, we induce delays.''' time.sleep(abs(1./x[0]) / 100000.", "2.5 mu2 = - np.sqrt(abs((mu1**2 - 1.0) / s)) firstSum = 0.0 secondSum", "the continuous case but in that cases discretizes the input domain by ]0.5,1.5]", "-> float: \"\"\"A bit more translated sphere function.\"\"\" return float(np.sum((x - 2.)**2)) @registry.register", "time from typing import Dict, Any, Tuple, List, Callable import numpy as np", "It also works in the continuous case but in that cases discretizes the", "we get closer to the optimum.\"\"\" assert len(x) >= 2 distance = np.sqrt(x[0]**2", "# following functions using discretization should not be used with translation/rotation @registry.register_with_info(no_transfrom=True) def", "variables.\"\"\" return float(x[-1]**2 + 1000000. * np.sum(x[:-1]**2)) @registry.register def cigar(x: np.ndarray) -> float:", "if x[1] != 0. else np.pi / 2. invdistance = int(1. / distance)", "Ellipsoid, but variables in inverse order. E.g. for pointing out algorithms not invariant", "(3**(x - 2)**2 + .1)) return float(np.max(dec)) @registry.register def sumdeceptive(x: np.ndarray) -> float:", "10) @registry.register def st100(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 100.\"\"\" return", "-> float: \"\"\"An extreme ill conditioned functions. Most algorithms fail on this. 
The", "- 2)**2 + .1)) return float(np.max(dec)) @registry.register def sumdeceptive(x: np.ndarray) -> float: dec", "enumerate(list(x)): if int(round(x_)) != 1: return len(x) - i return 0 def _jump(x:", "x))) return float(10 * (len(x) - cosi) + sphere(x)) @registry.register def hm(x: np.ndarray)", "by 5.\"\"\" return _onemax(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def jump5(y: np.ndarray) -> float: \"\"\"Softmax discretization", "float: \"\"\"An extreme ill conditioned functions. Most algorithms fail on this. The condition", "this. The condition number increases to infinity as we get closer to the", "function which needs following a long path. Most algorithms fail on this. The", "(1. / distance) if distance > 0. else 0. if np.abs(np.cos(invdistance) - angle)", "+ 5 * x) # return a positive value for maximization return float(39.16599", "4.)**2)) @registry.register def maxdeceptive(x: np.ndarray) -> float: dec = 3 * x**2 -", "distance = np.sqrt(x[0]**2 + x[1]**2) if distance == 0.: return 0. angle =", "float: \"\"\"Softmax discretization of leadingones with 5 possibles values. This multiplies the dimension", "np.ndarray) -> float: \"\"\"Softmax discretization of onemax with 5 possibles values. This multiplies", "..instrumentation import discretization from ..common.decorators import Registry registry = Registry[Callable[[np.ndarray], float]]() def _onemax(x:", "len(x) >= 2 return float(max(np.abs(np.arctan(x[1]/x[0])), np.sqrt(x[0]**2. + x[1]**2.), 1. if x[0] > 0", "= 0.0 secondSum = 0.0 thirdSum = 0.0 for i in range(problemDimensions): firstSum", "+= (x[i]-mu2)**2 thirdSum += 1.0 - np.cos(2*np.pi*(x[i]-mu1)) return min(firstSum, 1.0*problemDimensions + secondSum)+10*thirdSum #", "- number of ones.. It also works in the continuous case but in", "* np.pi * x))) return float(10 * (len(x) - cosi) + sphere(x)) @registry.register", "discretization of leadingones with 5 possibles values. 
# NOTE(review): the registry keyword is spelled "no_transfrom" project-wide;
# keep the spelling as-is for compatibility with the registry/benchmark code.
@registry.register_with_info(no_transfrom=True)
def onemax5(y: np.ndarray) -> float:
    """Softmax discretization of onemax with 5 possible values.
    This multiplies the dimension by 5."""
    return _onemax(discretization.softmax_discretization(y, 5))


@registry.register_with_info(no_transfrom=True)
def jump5(y: np.ndarray) -> float:
    """Softmax discretization of jump with 5 possible values.
    This multiplies the dimension by 5."""
    return _jump(discretization.softmax_discretization(y, 5))


@registry.register_with_info(no_transfrom=True)
def leadingones5(y: np.ndarray) -> float:
    """Softmax discretization of leadingones with 5 possible values.
    This multiplies the dimension by 5."""
    return _leadingones(discretization.softmax_discretization(y, 5))
@registry.register
def ellipsoid(x: np.ndarray) -> float:
    """Classical example of ill conditioned function.
    The other classical example is cigar.
    """
    # NOTE(review): the exponent uses (i - 1), so coordinate scales span
    # 10**(-6/(n-1)) .. 10**(6*(n-2)/(n-1)); confirm (i - 1) vs i is intended.
    # Also assumes len(x) >= 2 (division by len(x) - 1).
    return sum((10**(6 * (i - 1) / float(len(x) - 1))) * (x[i]**2) for i in range(len(x)))


@registry.register
def altellipsoid(y: np.ndarray) -> float:
    """Similar to Ellipsoid, but variables in inverse order.
    E.g. for pointing out algorithms not invariant to the order of variables."""
    # Same conditioning as ellipsoid, applied to the reversed vector.
    return ellipsoid(y[::-1])


@registry.register
def deceptivepath(x: np.ndarray) -> float:
    """A function which needs following a long path. Most algorithms fail on this.
    The path becomes thinner as we get closer to the optimum."""
    assert len(x) >= 2
    distance = np.sqrt(x[0]**2 + x[1]**2)
    if distance == 0.:
        return 0.
    angle = np.arctan(x[0] / x[1]) if x[1] != 0. else np.pi / 2.
    # NOTE(review): truncates 1/distance with int() here, while
    # deceptivemultimodal keeps the float — confirm the asymmetry is intended.
    invdistance = int(1. / distance) if distance > 0. else 0.
    if np.abs(np.cos(invdistance) - angle) > 0.1:
        return 1.
    return float(distance)


@registry.register
def deceptivemultimodal(x: np.ndarray) -> float:
    """Infinitely many local optima, as we get closer to the optimum."""
    assert len(x) >= 2
    distance = np.sqrt(x[0]**2 + x[1]**2)
    if distance == 0.:
        return 0.
    angle = np.arctan(x[0] / x[1]) if x[1] != 0. else np.pi / 2.
    invdistance = (1. / distance) if distance > 0. else 0.
    if np.abs(np.cos(invdistance) - angle) > 0.1:
        return 1.
    return float(distance)
registry.register(DelayedSphere())


@registry.register
def sphere(x: np.ndarray) -> float:
    """The most classical continuous optimization testbed.
    If you do not solve that one then you have a bug."""
    squared = np.square(x)
    return float(squared.sum())


@registry.register_with_info(no_transfrom=True)
def hardjump5(y: np.ndarray) -> float:
    """Jump, with a discretization by 5 with 4 thresholds (quantiles of Gaussian)."""
    levels = discretization.threshold_discretization(y, 5)
    return _jump(levels)
import time from", "def st0(x: np.ndarray) -> float: \"\"\"Styblinksitang function with 0 noise.\"\"\" return _styblinksitang(x, 0)", "integration, tested in optim because why not.\"\"\" return float(np.exp(-np.sum(x**2 / 4.))) @registry.register def", "/ s)) firstSum = 0.0 secondSum = 0.0 thirdSum = 0.0 for i", "np.ndarray) -> float: \"\"\"Hardjump, with a discretization in 2 by threshold 0 (>0", "\"\"\"Hardjump, with a discretization in 2 by threshold 0 (>0 or <0).\"\"\" return", "classical discrete function, adapted for minimization. Returns len(x) - number of initial 1.", "== 0.: return 0. angle = np.arctan(x[0] / x[1]) if x[1] != 0.", "5 possibles values. This multiplies the dimension by 5.\"\"\" return _leadingones(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True)", "for pointing out algorithms not invariant to the order of variables.\"\"\" return float(x[-1]**2", "discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _jump(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True)", "dimension by 5.\"\"\" return _jump(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def leadingones5(y: np.ndarray) -> float: \"\"\"Softmax", "np.ndarray) -> float: \"\"\"Jump, with a discretization by 5 with 4 thresholds (quantiles", "value: float) -> float: x = args[0] return float(abs(1./x[0]) / 1000.) if x[0]", "hardleadingones5(y: np.ndarray) -> float: \"\"\"Leadingones, with a discretization by 5 with 4 thresholds", "not.\"\"\" value = float(1 + np.mean(np.tanh(y))) if value == 0: return float(\"inf\") return", "a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _onemax(discretization.threshold_discretization(y, 5))", "0, leadingones([1 0 0 0]) = 1. 
\"\"\" for i, x_ in enumerate(list(x)):", "\"\"\"Softmax discretization of onemax (This multiplies the dimension by 2).\"\"\" return _onemax(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True)", "why not.\"\"\" return -float(np.exp(-sum(x**2 / 4.))) @registry.register def slope(x: np.ndarray) -> float: return", "hardonemax(y: np.ndarray) -> float: \"\"\"Onemax, with a discretization in 2 by threshold 0", "\"\"\"Multimodal function, often used in Bayesian optimization.\"\"\" part1 = np.sum(x**2) part2 = np.prod(np.cos(x", "often used in Bayesian optimization.\"\"\" part1 = np.sum(x**2) part2 = np.prod(np.cos(x / np.sqrt(1", "else np.pi / 2. invdistance = int(1. / distance) if distance > 0.", "of 1, and returns len(x) - number of ones.. It also works in", "// 4 o = n - _onemax(x) if o == n or o", "\"\"\"leadingones is the second most classical discrete function, adapted for minimization. Returns len(x)", "_styblinksitang(x: np.ndarray, noise: float) -> float: \"\"\"Classical function for testing noisy optimization.\"\"\" x", "the order of variables.\"\"\" return float(x[-1]**2 + 1000000. * np.sum(x[:-1]**2)) @registry.register def cigar(x:", "float: # TODO: docstring? \"\"\"There exists variants of jump functions; we are in", "if x[0] != 0. else float(\"inf\")) @registry.register def deceptivepath(x: np.ndarray) -> float: \"\"\"A", "m: return n - m - o return o # Deceptive part. def", "(i - 1) / float(len(x) - 1))) * (x[i]**2) for i in range(len(x)))", "It just counts the number of 1, and returns len(x) - number of", "float: \"\"\"Classical multimodal function.\"\"\" cosi = float(np.sum(np.cos(2 * np.pi * x))) return float(10", "the MIT license found in the # LICENSE file in the root directory", "...], kwargs: Dict[str, Any], value: float) -> float: x = args[0] return float(abs(1./x[0])", "distance) if distance > 0. else 0. 
if np.abs(np.cos(invdistance) - angle) > 0.1:", "threshold 0 (>0 or <0).\"\"\" return _jump(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardleadingones(y: np.ndarray) -> float:", "np.sum(x[1:]**2)) @registry.register def altellipsoid(y: np.ndarray) -> float: \"\"\"Similar to Ellipsoid, but variables in", "np.ndarray) -> float: \"\"\"Softmax discretization of leadingones with 5 possibles values. This multiplies", "def sphere1(x: np.ndarray) -> float: \"\"\"Translated sphere function.\"\"\" return float(np.sum((x - 1.)**2)) @registry.register", "with a discretization in 2 by threshold 0 (>0 or <0).\"\"\" return _onemax(discretization.threshold_discretization(y))", "slope(x: np.ndarray) -> float: return sum(x) @registry.register def linear(x: np.ndarray) -> float: return", "the # LICENSE file in the root directory of this source tree. import", "rosenbrock(x: np.ndarray) -> float: return sum(100.0*(x[1:] - x[:-1]**2.0)**2.0 + (1 - x[:-1])**2.0) @registry.register", "o == n or o <= n - m: return n - m", "0. else 0. if np.abs(np.cos(invdistance) - angle) > 0.1: return 1. return float(distance)", "/ 2. invdistance = (1. / distance) if distance > 0. else 0.", "class DelayedSphere(PostponedObject): def __call__(self, x: np.ndarray) -> float: return float(np.sum(x**2)) def get_postponing_delay(self, args:", "works in the continuous case but in that cases discretizes the input domain", "@registry.register def hm(x: np.ndarray) -> float: \"\"\"New multimodal function (proposed for Nevergrad).\"\"\" return", "float: \"\"\"Hardonemax, with a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\"", "of a jump function is that local descent does not succeed. Jumps are", "float) -> float: x = args[0] return float(abs(1./x[0]) / 1000.) if x[0] !=", "0]) = 1. \"\"\" for i, x_ in enumerate(list(x)): if int(round(x_)) != 1:", "and returns len(x) - number of ones.. 
It also works in the continuous", "value = float(1 + np.mean(np.tanh(y))) if value == 0: return float(\"inf\") return value**(-len(y)", "np.sqrt(x[0]**2 + x[1]**2) if distance == 0.: return 0. angle = np.arctan(x[0] /", "x**2 - (2 / (3**(x - 2)**2 + .1)) return float(np.max(dec)) @registry.register def", "np.ndarray) -> float: \"\"\"Translated sphere function.\"\"\" return float(np.sum((x - 1.)**2)) @registry.register def sphere2(x:", "@registry.register_with_info(no_transfrom=True) def onemax(y: np.ndarray) -> float: \"\"\"Softmax discretization of onemax (This multiplies the", "with a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return _jump(discretization.threshold_discretization(y,", "else 0 for w in x) def _leadingones(x: List[int]) -> float: \"\"\"leadingones is", "def minusgenzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of the Genz functions, originally used in", "float: \"\"\"A bit more translated sphere function.\"\"\" return float(np.sum((x - 2.)**2)) @registry.register def", "Tuple, List, Callable import numpy as np from .utils import PostponedObject from ..instrumentation", "by threshold 0 (>0 or <0).\"\"\" return _onemax(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardjump(y: np.ndarray) ->", "Gaussian).\"\"\" return _jump(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardleadingones5(y: np.ndarray) -> float: \"\"\"Leadingones, with a", "0. else np.pi / 2. invdistance = (1. 
/ distance) if distance >", "thirdSum += 1.0 - np.cos(2*np.pi*(x[i]-mu1)) return min(firstSum, 1.0*problemDimensions + secondSum)+10*thirdSum # following functions", "part1 = np.sum(x**2) part2 = np.prod(np.cos(x / np.sqrt(1 + np.arange(len(x))))) return 1 +", "for testing noisy optimization.\"\"\" x = np.asarray(x) val = np.sum(np.power(x, 4) - 16", "+ secondSum)+10*thirdSum # following functions using discretization should not be used with translation/rotation", "should not be used with translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray) -> float: \"\"\"Onemax,", "x = args[0] return float(abs(1./x[0]) / 1000.) if x[0] != 0. else 0.", "because why not.\"\"\" value = float(1 + np.mean(np.tanh(y))) if value == 0: return", "# # This source code is licensed under the MIT license found in", "return _onemax(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def jump(y: np.ndarray) -> float: \"\"\"Softmax discretization of jump (This", "function. The other classical example is cigar. \"\"\" return sum((10**(6 * (i -", "is cigar. \"\"\" return sum((10**(6 * (i - 1) / float(len(x) - 1)))", "ellipsoid. \"\"\" return float(x[0]**2 + 1000000. * np.sum(x[1:]**2)) @registry.register def altellipsoid(y: np.ndarray) ->", "def genzgaussianpeakintegral(x: np.ndarray) -> float: \"\"\"One of the Genz functions, originally used in", "by threshold 0 (>0 or <0).\"\"\" return _leadingones(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardonemax5(y: np.ndarray) ->", "-> float: \"\"\"Softmax discretization of leadingones with 5 possibles values. This multiplies the", "2)**2 + .1)) return float(np.max(dec)) @registry.register def sumdeceptive(x: np.ndarray) -> float: dec =", "List[int]) -> float: # TODO: docstring? 
\"\"\"There exists variants of jump functions; we", "a bug.\"\"\" return float(np.sum(x**2)) @registry.register def sphere1(x: np.ndarray) -> float: \"\"\"Translated sphere function.\"\"\"", "float: \"\"\"New multimodal function (proposed for Nevergrad).\"\"\" return float(np.sum((x**2) * (1.1 + np.cos(1.", "@registry.register def slope(x: np.ndarray) -> float: return sum(x) @registry.register def linear(x: np.ndarray) ->", "+ x[1]**2.), 1. if x[0] > 0 else 0.) if x[0] != 0.", "(This multiplies the dimension by 2).\"\"\" return _onemax(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def jump(y: np.ndarray) ->", "(This multiplies the dimension by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def onemax5(y: np.ndarray) ->", "extreme ill conditioned functions. Most algorithms fail on this. The condition number increases", "part2 = np.prod(np.cos(x / np.sqrt(1 + np.arange(len(x))))) return 1 + (float(part1)/4000.0) - float(part2)", "The path becomes thiner as we get closer to the optimum.\"\"\" assert len(x)", "x[1]) if x[1] != 0. else np.pi / 2. invdistance = (1. /", "\"\"\"Softmax discretization of onemax with 5 possibles values. This multiplies the dimension by", "Dict[str, Any], value: float) -> float: x = args[0] return float(abs(1./x[0]) / 1000.)", "return float(np.sum(x**2)) def get_postponing_delay(self, args: Tuple[Any, ...], kwargs: Dict[str, Any], value: float) ->", "deceptiveillcond(x: np.ndarray) -> float: \"\"\"An extreme ill conditioned functions. 
@registry.register
def deceptiveillcond(x: np.ndarray) -> float:
    """An extremely ill-conditioned function. Most algorithms fail on this.
    The condition number increases as we get closer to the optimum."""
    assert len(x) >= 2
    # Undefined at x[0] == 0: treated as the worst possible value (inf).
    return float(max(np.abs(np.arctan(x[1] / x[0])),
                     np.sqrt(x[0]**2. + x[1]**2.),
                     1. if x[0] > 0 else 0.) if x[0] != 0. else float("inf"))


@registry.register
def delayedsphere(x: np.ndarray) -> float:
    """For asynchronous experiments, we induce delays."""
    # Sleep longer as x[0] approaches 0 (i.e. near the sphere optimum).
    time.sleep(abs(1. / x[0]) / 100000. if x[0] != 0. else 0.)
    return float(np.sum(x**2))


class DelayedSphere(PostponedObject):
    """Sphere function whose evaluation is postponed via PostponedObject:
    get_postponing_delay supplies a delay based on the evaluated point."""

    def __call__(self, x: np.ndarray) -> float:
        """Plain sphere value at x."""
        return float(np.sum(x**2))

    def get_postponing_delay(self, args: Tuple[Any, ...], kwargs: Dict[str, Any], value: float) -> float:
        """Delay grows as x[0] approaches 0; zero delay when x[0] == 0."""
        x = args[0]
        return float(abs(1. / x[0]) / 1000.) if x[0] != 0. else 0.
/ distance) if distance > 0.", "-> float: \"\"\"Leadingones, with a discretization by 5 with 4 thresholds (quantiles of", "jump5(y: np.ndarray) -> float: \"\"\"Softmax discretization of jump with 5 possibles values. This", "most classical case of discrete functions, adapted to minimization. It is originally designed", "function is that local descent does not succeed. Jumps are necessary. \"\"\" n", "<0).\"\"\" return _jump(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardleadingones(y: np.ndarray) -> float: \"\"\"Leading ones, with a", "of discrete functions, adapted to minimization. It is originally designed for lists of", "len(x) - number of initial 1. I.e. leadingones([0 1 1 1]) = 4,", "if o == n or o <= n - m: return n -", "4 thresholds (quantiles of Gaussian).\"\"\" return _jump(discretization.threshold_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def hardleadingones5(y: np.ndarray) ->", "def jump5(y: np.ndarray) -> float: \"\"\"Softmax discretization of jump with 5 possibles values.", "(1.1 + np.cos(1. / x)))) @registry.register def rosenbrock(x: np.ndarray) -> float: return sum(100.0*(x[1:]", "closer to the optimum.\"\"\" assert len(x) >= 2 distance = np.sqrt(x[0]**2 + x[1]**2)", "np.pi * x))) return float(10 * (len(x) - cosi) + sphere(x)) @registry.register def", "3 * x**2 - (2 / (3**(x - 2)**2 + .1)) return float(np.sum(dec))", "if x[0] > 0 else 0.) if x[0] != 0. else float(\"inf\")) @registry.register", "float: dec = 3 * x**2 - (2 / (3**(x - 2)**2 +", "if int(round(w)) == 1 else 0 for w in x) def _leadingones(x: List[int])", "for Nevergrad).\"\"\" return float(np.sum((x**2) * (1.1 + np.cos(1. / x)))) @registry.register def rosenbrock(x:", "16 * np.power(x, 2) + 5 * x) # return a positive value", "for lists of bits. It just counts the number of 1, and returns", "= 3 * x**2 - (2 / (3**(x - 2)**2 + .1)) return", "1. I.e. 
leadingones([0 1 1 1]) = 4, leadingones([1 1 1 1]) =", "optimum.\"\"\" assert len(x) >= 2 return float(max(np.abs(np.arctan(x[1]/x[0])), np.sqrt(x[0]**2. + x[1]**2.), 1. if x[0]", "@registry.register def st10(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 10.\"\"\" return _styblinksitang(x,", "<reponame>akhti/nevergrad<filename>nevergrad/functions/corefuncs.py<gh_stars>1-10 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved. #", "in range(len(x))) @registry.register def rastrigin(x: np.ndarray) -> float: \"\"\"Classical multimodal function.\"\"\" cosi =", "- np.cos(2*np.pi*(x[i]-mu1)) return min(firstSum, 1.0*problemDimensions + secondSum)+10*thirdSum # following functions using discretization should", "- 8.2)) mu1 = 2.5 mu2 = - np.sqrt(abs((mu1**2 - 1.0) / s))", "def onemax5(y: np.ndarray) -> float: \"\"\"Softmax discretization of onemax with 5 possibles values.", "values. This multiplies the dimension by 5.\"\"\" return _onemax(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def jump5(y:", "n or o <= n - m: return n - m - o", "@registry.register def cigar(x: np.ndarray) -> float: \"\"\"Classical example of ill conditioned function. The", "the dimension by 5.\"\"\" return _leadingones(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def genzcornerpeak(y: np.ndarray) -> float:", "x[1]**2) if distance == 0.: return 0. angle = np.arctan(x[0] / x[1]) if", "float: \"\"\"Similar to Ellipsoid, but variables in inverse order. E.g. for pointing out", "discrete function, adapted for minimization. Returns len(x) - number of initial 1. I.e.", "# return a positive value for maximization return float(39.16599 * len(x) + 1", "return float(np.sum(x**2)) class DelayedSphere(PostponedObject): def __call__(self, x: np.ndarray) -> float: return float(np.sum(x**2)) def", "the second most classical discrete function, adapted for minimization. 
Returns len(x) - number", "from ..common.decorators import Registry registry = Registry[Callable[[np.ndarray], float]]() def _onemax(x: List[int]) -> float:", "st1(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 1.\"\"\" return _styblinksitang(x, 1) @registry.register", "- 16 * np.power(x, 2) + 5 * x) # return a positive", "the dimension by 5.\"\"\" return _onemax(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def jump5(y: np.ndarray) -> float:", "multiplies the dimension by 2).\"\"\" return _leadingones(discretization.softmax_discretization(y)) @registry.register_with_info(no_transfrom=True) def onemax5(y: np.ndarray) -> float:", "+ np.cos(1. / x)))) @registry.register def rosenbrock(x: np.ndarray) -> float: return sum(100.0*(x[1:] -", "o <= n - m: return n - m - o return o", "Registry registry = Registry[Callable[[np.ndarray], float]]() def _onemax(x: List[int]) -> float: \"\"\"onemax(x) is the", "in integration, tested in optim because why not.\"\"\" return -float(genzcornerpeak(y)) @registry.register def genzgaussianpeakintegral(x:", "float(np.sum((x**2) * (1.1 + np.cos(1. / x)))) @registry.register def rosenbrock(x: np.ndarray) -> float:", "minusgenzcornerpeak(y: np.ndarray) -> float: \"\"\"One of the Genz functions, originally used in integration,", "5)) @registry.register_with_info(no_transfrom=True) def leadingones5(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones with 5", "float: \"\"\"The most classical continuous optimization testbed. If you do not solve that", "of leadingones with 5 possibles values. This multiplies the dimension by 5.\"\"\" return", "return float(np.tanh(x[0])) @registry.register def st0(x: np.ndarray) -> float: \"\"\"Styblinksitang function with 0 noise.\"\"\"", "def lunacek(x: np.ndarray) -> float: \"\"\"Multimodal function. 
Based on https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html.\"\"\" problemDimensions = len(x)", "\"\"\"Even more translated sphere function.\"\"\" return float(np.sum((x - 4.)**2)) @registry.register def maxdeceptive(x: np.ndarray)", "x[0] != 0. else float(\"inf\")) @registry.register def deceptivepath(x: np.ndarray) -> float: \"\"\"A function", "np.ndarray) -> float: \"\"\"Infinitely many local optima, as we get closer to the", "number of initial 1. I.e. leadingones([0 1 1 1]) = 4, leadingones([1 1", "min(firstSum, 1.0*problemDimensions + secondSum)+10*thirdSum # following functions using discretization should not be used", "@registry.register_with_info(no_transfrom=True) def hardleadingones5(y: np.ndarray) -> float: \"\"\"Leadingones, with a discretization by 5 with", "\"\"\"Leadingones, with a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\" return", "@registry.register def st100(x: np.ndarray) -> float: \"\"\"Styblinksitang function with noise 100.\"\"\" return _styblinksitang(x,", "example is ellipsoid. \"\"\" return float(x[0]**2 + 1000000. 
* np.sum(x[1:]**2)) @registry.register def altellipsoid(y:", "(2 / (3**(x - 2)**2 + .1)) return float(np.max(dec)) @registry.register def sumdeceptive(x: np.ndarray)", "(>0 or <0).\"\"\" return _leadingones(discretization.threshold_discretization(y)) @registry.register_with_info(no_transfrom=True) def hardonemax5(y: np.ndarray) -> float: \"\"\"Hardonemax, with", "float: \"\"\"Leadingones, with a discretization by 5 with 4 thresholds (quantiles of Gaussian).\"\"\"", "https://www.cs.unm.edu/~neal.holts/dga/benchmarkFunction/lunacek.html.\"\"\" problemDimensions = len(x) s = 1.0 - (1.0 / (2.0 * np.sqrt(problemDimensions", "1 1]) = 4, leadingones([1 1 1 1]) = 0, leadingones([1 0 0", "using discretization should not be used with translation/rotation @registry.register_with_info(no_transfrom=True) def hardonemax(y: np.ndarray) ->", "asynchronous experiments, we induce delays.''' time.sleep(abs(1./x[0]) / 100000. if x[0] != 0. else", "@registry.register_with_info(no_transfrom=True) def leadingones(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones (This multiplies the", "1.0 - (1.0 / (2.0 * np.sqrt(problemDimensions + 20.0) - 8.2)) mu1 =", "_jump(discretization.softmax_discretization(y, 5)) @registry.register_with_info(no_transfrom=True) def leadingones5(y: np.ndarray) -> float: \"\"\"Softmax discretization of leadingones with", "float: \"\"\"A function which needs following a long path. Most algorithms fail on", "np.sqrt(x[0]**2. + x[1]**2.), 1. if x[0] > 0 else 0.) if x[0] !=", "mu1 = 2.5 mu2 = - np.sqrt(abs((mu1**2 - 1.0) / s)) firstSum =", "def maxdeceptive(x: np.ndarray) -> float: dec = 3 * x**2 - (2 /", "Nevergrad).\"\"\" return float(np.sum((x**2) * (1.1 + np.cos(1. / x)))) @registry.register def rosenbrock(x: np.ndarray)", "(1.0 / (2.0 * np.sqrt(problemDimensions + 20.0) - 8.2)) mu1 = 2.5 mu2", "affiliates. All Rights Reserved. # # This source code is licensed under the", "not succeed. Jumps are necessary. 
\"\"\" n = len(x) m = n //", "float(\"inf\") return value**(-len(y) - 1) @registry.register_with_info(no_transfrom=True) def minusgenzcornerpeak(y: np.ndarray) -> float: \"\"\"One of", "optim because why not.\"\"\" return -float(np.exp(-sum(x**2 / 4.))) @registry.register def slope(x: np.ndarray) ->", "np.cos(2*np.pi*(x[i]-mu1)) return min(firstSum, 1.0*problemDimensions + secondSum)+10*thirdSum # following functions using discretization should not", "np.ndarray) -> float: return float(np.tanh(x[0])) @registry.register def st0(x: np.ndarray) -> float: \"\"\"Styblinksitang function", "x) # return a positive value for maximization return float(39.16599 * len(x) +" ]
[]
[ "check_if_verify(): \"\"\" This function checks if the account has been flagged for manual", "= randint(1, 5) print( \"Sleeping for \" + str(randinteger) + \" seconds due", "if the bot needs to input the shipping information if the user has", "Select from selenium.common.exceptions import NoSuchElementException from random import randint from time import sleep", "guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button(): \"\"\" This function types the supplied email", "to input the shipping information if the user has been signed in using", "WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") email =", "functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 3).until(element_present) except", "driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street address typed\") city = driver.find_element_by_xpath(", "= driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone = driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\" This function checks", "checks if the bot must enter payment information on the current page :rtype:", "WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") add_to_cart_button =", "Navigates to the URL supplied + the product URL \"\"\" driver.get(url + json['url'])", "for page to load\") fname = driver.find_element_by_id( 
\"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\") lname =", "place_order time_end = time.time() time_diff = time_end - time_start webhook.send( \"@everyone Purchased, Time", "'r')) webhook = Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates webhook using discord url driver", ":rtype: None Type \"\"\" try: not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException: return False", "currently and it throws a NoSuchElementException. :return: Returns True for in stock and", "page, then types the correct zip code for shipping, and then clicks update", "of the user if they have selected Guest checkout :rtype: object \"\"\" try:", "driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)') update.click() print(\"changed zip code\")", "checks if the account has been flagged for manual user verification :rtype: object", "URL \"\"\" driver.get(url + json['url']) def check_if_in_stock(): \"\"\" This function tries to find", "password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password typed\") button = driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed in\")", "\" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: if check_if_payment_info_on_page() is False:", "Your Account\" in verify: return False else: return True except NoSuchElementException: return False", "9).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue')", "code address section typed\") def input_phone_and_email(): \"\"\" This function inputs the phone number", "element_present = 
EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for", "cart\") def navigate_to_cart(): \"\"\" This function navigates to the BestBuy cart page \"\"\"", "the bot needs to input the shipping information if the user has been", "object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def input_payment_info(): \"\"\" This function inputs", "timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") email = driver.find_element_by_id(\"fld-e\")", "email = driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone = driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\" This function", "\"\"\" try: not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException: return False return True def", "This function places the order by clicking the final button :rtype: object \"\"\"", "and email that the user has provided if they are checking out as", "check_if_in_stock() if not y: in_stock = 0 randinteger = randint(1, 5) print( \"Sleeping", "location. 
:rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except", "False: click_continue_to_payment_info() input_payment_info() # place_order() time_end = time.time() time_diff = time_end - time_start", "= 0 time_end = 0 if purchased.strip() == \"0\": in_stock = 0 while", "'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code address section typed\") def input_shipping_info_guest(): \"\"\" This function inputs", "This function first selects the ZipCode element on the cart page, then types", "first selects the ZipCode element on the cart page, then types the correct", "out of stock currently and it throws a NoSuchElementException. :return: Returns True for", "== \"sign-in\": sign_in_and_click_button() if not check_if_verify(): quit(0) if check_if_shipping_info_needed() is True: input_shipping_information() if", "elapsed: \" + str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close()", "city = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\") select =", "'w') json2.write('1') json2.close() else: if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end", "selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException from random import randint from time", "Creates WebDriver instance url = \"https://www.bestbuy.com\" timeout = 3 # Timeout for element", "= driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip 
code\") zip_code_change = driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code'])", "and it throws a NoSuchElementException. :return: Returns True for in stock and False", ":rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def main(guest_or_sign_in): time_start = 0", "print(\"password typed\") button = driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed in\") def check_if_verify(): \"\"\" This", "it means it is out of stock currently and it throws a NoSuchElementException.", "button.click() print(\"signed in\") def check_if_verify(): \"\"\" This function checks if the account has", "element loaded checks purchased = open('purchased.txt', 'r').read() def navigate_to_bb(): \"\"\" * Navigates to", "True def click_continue_to_payment_info(): \"\"\" This function clicks the continue to payment information if", "city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\") select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state selected\")", "the order by clicking the final button :rtype: object \"\"\" button = driver.find_element_by_css_selector(", "email.send_keys(json['email']) print(\"email typed\") password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password typed\") button = driver.find_element_by_css_selector( '.cia-form__controls__submit')", "This function types the supplied email and password and then clicks the Sign", "\" + str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else:", "= open('purchased.txt', 'w') json2.write('1') json2.close() else: input_payment_info() # place_order time_end = time.time() time_diff", "been flagged for manual user verification :rtype: object \"\"\" try: verify 
= driver.find_element_by_css_selector(", "print(\"signed in\") def check_if_verify(): \"\"\" This function checks if the account has been", "= driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to cart\") def navigate_to_cart(): \"\"\" This function navigates", "== 0: navigate_to_product() driver.implicitly_wait(0.3) y = check_if_in_stock() if not y: in_stock = 0", "it does not find it, it means it is out of stock currently", "click_checkout_key(): \"\"\" This function clicks the checkout button on the BestBuy cart page", "= EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except TimeoutException: print(\"Timed out waiting for page", "find it, it means it is out of stock currently and it throws", "function clicks the checkout button on the BestBuy cart page :rtype: object \"\"\"", "element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except TimeoutException: print(\"Timed out waiting for", "suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address typed\") city", "open('purchased.txt', 'w') json2.write('1') json2.close() elif guest_or_sign_in == \"guest\": select_guest_checkout() # driver.refresh() input_shipping_info_guest() input_phone_and_email()", "tries to find the Add To Cart button, if it does not find", "NoSuchElementException: return False # return True def check_if_shipping_info_needed(): \"\"\" This function checks to", "section typed\") def input_phone_and_email(): \"\"\" This function inputs the phone number and email", ":rtype: object \"\"\" email = driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone = 
driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page():", "\"\"\" This function checks if the bot must enter payment information on the", "function inputs the phone number and email that the user has provided if", "print(\"month selected\") select = Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv'])", "click_checkout_key() if guest_or_sign_in == \"sign-in\": sign_in_and_click_button() if not check_if_verify(): quit(0) if check_if_shipping_info_needed() is", "checkout button on the BestBuy cart page :rtype: object \"\"\" checkout_button = driver.find_element_by_css_selector(", "to see if the bot needs to input the shipping information if the", "inputs the phone number and email that the user has provided if they", "False return True def input_shipping_information(): \"\"\" This function inputs the shipping information that", "following the BestBuy cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue'))", "cvv = driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV added\") def input_payment_info_guest(): \"\"\" This function inputs the", "for i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code address section typed\") def input_phone_and_email(): \"\"\"", "'button.btn-primary:nth-child(1)') except NoSuchElementException: return False return True def add_to_cart(): \"\"\" This function finds", "= open('purchased.txt', 'w') json2.write('1') json2.close() elif guest_or_sign_in == \"guest\": select_guest_checkout() # driver.refresh() input_shipping_info_guest()", "the user has provided if they are checking out as a guest :rtype:", "import webdriver from selenium.webdriver.common.keys import Keys from 
selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import", "This function inputs the phone number and email that the user has provided", "button = driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed in\") def check_if_verify(): \"\"\" This function checks", "add_to_cart(): \"\"\" This function finds the Add to Cart button, and then adds", "button, if it does not find it, it means it is out of", "element_present = EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for", "checkout_button = driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout started\") def select_guest_checkout(): \"\"\" This function selects", "return True def check_if_shipping_info_needed(): \"\"\" This function checks to see if the bot", "Account\" in verify: return False else: return True except NoSuchElementException: return False #", "bot needs to input the shipping information if the user has been signed", "\"\"\" This function types the supplied email and password and then clicks the", "page \"\"\" driver.get(url + \"/cart\") print(\"navigated to cart\") return driver.title def change_zip_code_and_select_shipping(): \"\"\"", "to cart\") def navigate_to_cart(): \"\"\" This function navigates to the BestBuy cart page", "'h1.cia-section-title').text if \"Verify Your Account\" in verify: return False else: return True except", "typed\") select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code'])", "quit(0) if check_if_shipping_info_needed() is True: input_shipping_information() if check_if_payment_info_on_page() is False: 
click_continue_to_payment_info() input_payment_info() #", "that the user has provided if they are checking out as a guest", "open('purchased.txt', 'r').read() def navigate_to_bb(): \"\"\" * Navigates to the URL supplied, by default", "print(\"changed zip code\") def click_checkout_key(): \"\"\" This function clicks the checkout button on", "\"@everyone Purchased, Time elapsed: \" + str(time_diff) + \" Seconds\") json2 = open('purchased.txt',", "url = \"https://www.bestbuy.com\" timeout = 3 # Timeout for element loaded checks purchased", "import Select from selenium.common.exceptions import NoSuchElementException from random import randint from time import", "sleep from discord import Webhook, RequestsWebhookAdapter import json # Loads config file json", "place_order(): \"\"\" This function places the order by clicking the final button :rtype:", "3 # Timeout for element loaded checks purchased = open('purchased.txt', 'r').read() def navigate_to_bb():", "select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip", "change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in == \"sign-in\": sign_in_and_click_button() if not check_if_verify(): quit(0) if check_if_shipping_info_needed()", "0 if purchased.strip() == \"0\": in_stock = 0 while in_stock == 0: navigate_to_product()", "= open('purchased.txt', 'w') json2.write('1') json2.close() else: webhook.send( \"@everyone Not purchased as item has", "from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import", "\"\"\" * Navigates to the URL 
supplied + the product URL \"\"\" driver.get(url", "def add_to_cart(): \"\"\" This function finds the Add to Cart button, and then", "typed\") button = driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed in\") def check_if_verify(): \"\"\" This function", "\"\"\" try: verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify Your Account\" in verify: return", "\"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\") select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state'])", "json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info()", "and then clicks update location. 
:rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR,", "- running script\") #webhook.send(\"@everyone Stock Found\") #webhook.send(url + json['url']) time_start = time.time() add_to_cart()", "script\") #webhook.send(\"@everyone Stock Found\") #webhook.send(url + json['url']) time_start = time.time() add_to_cart() in_stock =", "function inputs the shipping information that the user provides if they have been", "then adds the product to cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located(", "= driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout started\") def select_guest_checkout(): \"\"\" This function selects the", "json2.close() elif guest_or_sign_in == \"guest\": select_guest_checkout() # driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() #", "open('purchased.txt', 'w') json2.write('1') json2.close() else: webhook.send( \"@everyone Not purchased as item has already", "check_if_in_stock(): \"\"\" This function tries to find the Add To Cart button, if", "and password and then clicks the Sign In button. 
:rtype: object \"\"\" try:", "return True def input_shipping_information(): \"\"\" This function inputs the shipping information that the", "\"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street address typed\") city = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\")", "print(\"year selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\") def place_order(): \"\"\" This function", "if \"Verify Your Account\" in verify: return False else: return True except NoSuchElementException:", "WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") fname =", "driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\") select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]'))", "object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: 
print(\"Timed", "select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip", "driver.implicitly_wait(0.3) y = check_if_in_stock() if not y: in_stock = 0 randinteger = randint(1,", "button.click() def main(guest_or_sign_in): time_start = 0 time_end = 0 if purchased.strip() == \"0\":", "\"#location\") zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)') update.click() print(\"changed zip code\") def", "BestBuy cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present)", "suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address typed\") city =", "Type \"\"\" try: not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException: return False return True", "webdriver.Firefox( executable_path=json['executable_path']) # Creates WebDriver instance url = \"https://www.bestbuy.com\" timeout = 3 #", "True def input_shipping_information(): \"\"\" This function inputs the shipping information that the user", "information on the current page :rtype: object \"\"\" try: cvv = driver.find_element_by_id('credit-card-cvv') except", "cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except", "found - running script\") #webhook.send(\"@everyone Stock Found\") #webhook.send(url + json['url']) time_start = time.time()", "if they 
have been logged in with previous functions :rtype: object \"\"\" try:", "lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions", "TimeoutException: print(\"Timed out waiting for page to load\") fname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name'])", "\"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street", "Cart button, if it does not find it, it means it is out", "function selects the Checkout as Guest option on the page following the BestBuy", "json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: webhook.send( \"@everyone Not purchased as item", "\"https://www.bestbuy.com\" timeout = 3 # Timeout for element loaded checks purchased = open('purchased.txt',", "\"\"\" fname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] + \"", "print(\"street address typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\") select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state'))", "\"\"\" This function inputs the CVV if the user has been logged in", "navigate_to_product(): \"\"\" * Navigates to the URL supplied + the product URL \"\"\"", "\"\"\" This function places the order by clicking the final button :rtype: object", "verify: return False else: return 
True except NoSuchElementException: return False # return True", ":rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 3).until(element_present) except BaseException:", "needs to input the shipping information if the user has been signed in", "page to load\") email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\") password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password'])", "import NoSuchElementException from random import randint from time import sleep from discord import", "\"0\": in_stock = 0 while in_stock == 0: navigate_to_product() driver.implicitly_wait(0.3) y = check_if_in_stock()", "'w') json2.write('1') json2.close() elif guest_or_sign_in == \"guest\": select_guest_checkout() # driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info()", "if guest_or_sign_in == \"sign-in\": sign_in_and_click_button() if not check_if_verify(): quit(0) if check_if_shipping_info_needed() is True:", "= driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_id(", "i in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\") select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\")", "has provided if they are checking out as a guest :rtype: object \"\"\"", "object \"\"\" try: cvv = driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return False return True def", "def input_payment_info_guest(): 
\"\"\" This function inputs the payment information of the user if", "product to cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver,", "in range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street address typed\") city = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in", "Stock Found\") #webhook.send(url + json['url']) time_start = time.time() add_to_cart() in_stock = 1 navigate_to_cart()", "to the BestBuy cart page \"\"\" driver.get(url + \"/cart\") print(\"navigated to cart\") return", "select = Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\")", "driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to cart\") def navigate_to_cart(): \"\"\" This function navigates to", "range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street address typed\") city = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['city'])):", "= driver.find_element_by_css_selector( '.btn-lg') button.click() def main(guest_or_sign_in): time_start = 0 time_end = 0 if", "removed\") address = driver.find_element_by_xpath( 
\"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street address typed\")", "inputs the shipping information that the user provides if they have been logged", "def input_phone_and_email(): \"\"\" This function inputs the phone number and email that the", "the correct zip code for shipping, and then clicks update location. :rtype: object", ":rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException:", "print(\"Timed out waiting for page to load\") fname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname", "return True def add_to_cart(): \"\"\" This function finds the Add to Cart button,", "This function inputs the CVV if the user has been logged in during", "Add to Cart button, and then adds the product to cart :rtype: object", "shipping information that the user provides if they have been logged in with", "json2.close() else: webhook.send( \"@everyone Not purchased as item has already been bought. 
\"", "from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException from", "enter payment information on the current page :rtype: object \"\"\" try: cvv =", "\"\"\" This function finds the Add to Cart button, and then adds the", "BestBuy cart page \"\"\" driver.get(url + \"/cart\") print(\"navigated to cart\") return driver.title def", "then types the correct zip code for shipping, and then clicks update location.", "\"\"\" try: cvv = driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return False return True def click_continue_to_payment_info():", "= Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code", "driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() # place_order() time_end = time.time() time_diff = time_end", "driver.title def change_zip_code_and_select_shipping(): \"\"\" This function first selects the ZipCode element on the", "print(\"email typed\") password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password typed\") button = driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click()", "the page following the BestBuy cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located(", "\"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def main(guest_or_sign_in): time_start = 0 time_end =", "+ json['url']) def check_if_in_stock(): \"\"\" This function tries to find the Add To", "False :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def input_payment_info(): \"\"\" This", 
"Found\") #webhook.send(url + json['url']) time_start = time.time() add_to_cart() in_stock = 1 navigate_to_cart() change_zip_code_and_select_shipping()", "Timeout for element loaded checks purchased = open('purchased.txt', 'r').read() def navigate_to_bb(): \"\"\" *", "if purchased.strip() == \"0\": in_stock = 0 while in_stock == 0: navigate_to_product() driver.implicitly_wait(0.3)", "cc_number = driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\") select", "for i in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\") select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state", "using the previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName'))", "# Creates WebDriver instance url = \"https://www.bestbuy.com\" timeout = 3 # Timeout for", "selects the Checkout as Guest option on the page following the BestBuy cart", "driver.get(url) print(\"navigated to bestbuy\") def navigate_to_product(): \"\"\" * Navigates to the URL supplied", ":rtype: object \"\"\" checkout_button = driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout started\") def select_guest_checkout(): \"\"\"", "\"\"\" This function inputs the phone number and email that the user has", "driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone = driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\" This function 
checks if", "from discord import Webhook, RequestsWebhookAdapter import json # Loads config file json =", "(By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\")", "clicks the checkout button on the BestBuy cart page :rtype: object \"\"\" checkout_button", "seconds due to product not being in stock\") sleep(randinteger) else: #print(\"Stock found -", "NoSuchElementException: return False return True def add_to_cart(): \"\"\" This function finds the Add", "= 3 # Timeout for element loaded checks purchased = open('purchased.txt', 'r').read() def", "function types the supplied email and password and then clicks the Sign In", "previous function returns False :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def", "\"guest\": select_guest_checkout() # driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() # place_order() time_end = time.time()", "print(json['first_name'] + \" typed\") lname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i])", "user has been logged in during a previous function and has a card", "typed\") def place_order(): \"\"\" This function places the order by clicking the final", "'.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") email", "from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import", "for i in range(len(json['address'])): 
address.send_keys(json['address'][i]) print(\"street address typed\") city = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for", "driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed in\") def check_if_verify(): \"\"\" This function checks if the", "selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support", "print(\"street address typed\") city = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city", "= 1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in == \"sign-in\": sign_in_and_click_button() if not check_if_verify():", "is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end = time.time() time_diff = time_end -", "TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException", "information if the previous function returns False :rtype: object \"\"\" button = driver.find_element_by_css_selector(", "Add To Cart button, if it does not find it, it means it", "EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to", "typed\") city = driver.find_element_by_xpath( 
\"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\") select", "WebDriver instance url = \"https://www.bestbuy.com\" timeout = 3 # Timeout for element loaded", "driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException: return False return True def add_to_cart(): \"\"\" This function", "+ \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: webhook.send( \"@everyone Not", "def input_shipping_information(): \"\"\" This function inputs the shipping information that the user provides", "purchased.strip() == \"0\": in_stock = 0 while in_stock == 0: navigate_to_product() driver.implicitly_wait(0.3) y", "str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() elif guest_or_sign_in ==", "\"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to cart\") def navigate_to_cart(): \"\"\" This function navigates to the", "throws a NoSuchElementException. 
:return: Returns True for in stock and False for not", "except NoSuchElementException: return False return True def add_to_cart(): \"\"\" This function finds the", "payment information if the previous function returns False :rtype: object \"\"\" button =", "suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address =", "cvv.send_keys(json['cvv']) print(\"CVV typed\") def place_order(): \"\"\" This function places the order by clicking", "if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end = time.time() time_diff =", "False else: return True except NoSuchElementException: return False # return True def check_if_shipping_info_needed():", "guest :rtype: object \"\"\" fname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i])", "print( \"Sleeping for \" + str(randinteger) + \" seconds due to product not", "NoSuchElementException. 
:return: Returns True for in stock and False for not in stock", "= driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password typed\") button = driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed in\") def", "randint from time import sleep from discord import Webhook, RequestsWebhookAdapter import json #", "WebDriverWait(driver, 3).until(element_present) except BaseException: return False return True def input_shipping_information(): \"\"\" This function", "driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] + \" typed\") lname =", "a guest :rtype: object \"\"\" fname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['first_name'])):", "checkout as a guest :rtype: object \"\"\" fname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i", "= driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\") def place_order(): \"\"\" This function places the order", "email.send_keys(json['email']) phone = driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\" This function checks if the", "EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 
3).until(element_present) except BaseException: return False return True def input_shipping_information():", "Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() elif guest_or_sign_in == \"guest\": select_guest_checkout() #", "on the BestBuy cart page :rtype: object \"\"\" checkout_button = driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click()", "y = check_if_in_stock() if not y: in_stock = 0 randinteger = randint(1, 5)", "been bought. \" \"To reset this please open purchased.txt and replace the 0", "the user provides if they have selected to checkout as a guest :rtype:", "is True: input_shipping_information() if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end =", "0 while in_stock == 0: navigate_to_product() driver.implicitly_wait(0.3) y = check_if_in_stock() if not y:", "function inputs the shipping information that the user provides if they have selected", "lname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions =", "typed\") password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password typed\") button = driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed", "then clicks update location. 
:rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link'))", "driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if", "Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\") def place_order():", "0 randinteger = randint(1, 5) print( \"Sleeping for \" + str(randinteger) + \"", "typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\") select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state", "= driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] + \" typed\") lname", "button, and then adds the product to cart :rtype: object \"\"\" try: element_present", "information if the user has been signed in using the previous functions :rtype:", "a previous function and has a card saved :rtype: object \"\"\" cvv =", "input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() # place_order() time_end = time.time() time_diff = time_end -", "and has a card saved :rtype: 
object \"\"\" cvv = driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV", "selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC", "selected\") zip_code = driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code address", "# Creates webhook using discord url driver = webdriver.Firefox( executable_path=json['executable_path']) # Creates WebDriver", "BestBuy cart page :rtype: object \"\"\" checkout_button = driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout started\")", "'/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code address section typed\") def input_phone_and_email():", "the bot must enter payment information on the current page :rtype: object \"\"\"", "print(\"navigated to bestbuy\") def navigate_to_product(): \"\"\" * Navigates to the URL supplied +", "Cart button, and then adds the product to cart :rtype: object \"\"\" try:", "if the user has been logged in during a previous function and has", "they have selected Guest checkout :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID,", "# driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() # place_order() time_end = time.time() time_diff =", "user has provided if they are 
checking out as a guest :rtype: object", "= open('purchased.txt', 'w') json2.write('1') json2.close() else: if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() #", "purchased as item has already been bought. \" \"To reset this please open", "TimeoutException: print(\"Timed out waiting for page to load\") cc_number = driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number'])", "a guest :rtype: object \"\"\" email = driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone = driver.find_element_by_id('user.phone') phone.send_keys(json['phone'])", "\"\"\" This function clicks the continue to payment information if the previous function", "Purchased, Time elapsed: \" + str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w')", "driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout started\") def select_guest_checkout(): \"\"\" This function selects the Checkout", "bought. 
\" \"To reset this please open purchased.txt and replace the 0 with", "button.click() def input_payment_info(): \"\"\" This function inputs the CVV if the user has", "load\") cc_number = driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\")", "URL supplied + the product URL \"\"\" driver.get(url + json['url']) def check_if_in_stock(): \"\"\"", "\"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click()", "address typed\") city = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\")", "in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address typed\")", "from selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from", "email that the user has provided if they are checking out as a", "\"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def input_payment_info(): \"\"\" This function inputs the", "main(guest_or_sign_in): time_start = 0 time_end = 0 if purchased.strip() == \"0\": in_stock =", "+ str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: input_payment_info()", "input_shipping_information(): \"\"\" This 
function inputs the shipping information that the user provides if", "import sleep from discord import Webhook, RequestsWebhookAdapter import json # Loads config file", "def check_if_in_stock(): \"\"\" This function tries to find the Add To Cart button,", "see if the bot needs to input the shipping information if the user", "selected Guest checkout :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver,", "def sign_in_and_click_button(): \"\"\" This function types the supplied email and password and then", "driver.find_element_by_css_selector( '.btn-lg') button.click() def main(guest_or_sign_in): time_start = 0 time_end = 0 if purchased.strip()", "= EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page", "driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\") select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code =", "'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") add_to_cart_button", "This function inputs the shipping information that the user provides if they have", "page to load\") fname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\") lname = driver.find_element_by_id(", "def check_if_payment_info_on_page(): \"\"\" This function checks if the bot must enter payment information", "zip_code.send_keys(json['zip_code'][i]) print(\"zip code address section typed\") def input_phone_and_email(): \"\"\" This function inputs the", "+ str(time_diff) + \" Seconds\") json2 = 
open('purchased.txt', 'w') json2.write('1') json2.close() else: webhook.send(", "ZipCode element on the cart page, then types the correct zip code for", ":rtype: object \"\"\" try: verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify Your Account\" in", "try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting", "input_shipping_info_guest(): \"\"\" This function inputs the shipping information that the user provides if", "Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions", "= Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates webhook using discord url driver = webdriver.Firefox(", "default this is BestBuy.com \"\"\" driver.get(url) print(\"navigated to bestbuy\") def navigate_to_product(): \"\"\" *", "means it is out of stock currently and it throws a NoSuchElementException. 
:return:", "in using the previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID,", "i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] + \" typed\") lname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for", "def navigate_to_cart(): \"\"\" This function navigates to the BestBuy cart page \"\"\" driver.get(url", "select_guest_checkout() # driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() # place_order() time_end = time.time() time_diff", "by default this is BestBuy.com \"\"\" driver.get(url) print(\"navigated to bestbuy\") def navigate_to_product(): \"\"\"", "address typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\") select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state'])", "that the user provides if they have been logged in with previous functions", "5) print( \"Sleeping for \" + str(randinteger) + \" seconds due to product", "in verify: return False else: return True except NoSuchElementException: return False # return", "i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in", "check_if_shipping_info_needed() is True: input_shipping_information() if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end", "print(\"city typed\") select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) 
select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode')", "selected to checkout as a guest :rtype: object \"\"\" fname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\")", "for element loaded checks purchased = open('purchased.txt', 'r').read() def navigate_to_bb(): \"\"\" * Navigates", "the phone number and email that the user has provided if they are", "in_stock = 1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in == \"sign-in\": sign_in_and_click_button() if not", "selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\") def place_order(): \"\"\" This function places", "print(\"CVV typed\") def place_order(): \"\"\" This function places the order by clicking the", "+ str(randinteger) + \" seconds due to product not being in stock\") sleep(randinteger)", "the URL supplied, by default this is BestBuy.com \"\"\" driver.get(url) print(\"navigated to bestbuy\")", "except TimeoutException: print(\"Timed out waiting for page to load\") zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\")", "sign_in_and_click_button() if not check_if_verify(): quit(0) if check_if_shipping_info_needed() is True: input_shipping_information() if check_if_payment_info_on_page() is", "== \"guest\": select_guest_checkout() # driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() # place_order() time_end =", "= Select(driver.find_element_by_xpath( 
'/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in", "\" + str(randinteger) + \" seconds due to product not being in stock\")", "TimeoutException: print(\"Timed out waiting for page to load\") add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click()", "\"\"\" This function tries to find the Add To Cart button, if it", "object \"\"\" try: verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify Your Account\" in verify:", "information of the user if they have selected Guest checkout :rtype: object \"\"\"", "webhook.send( \"@everyone Purchased, Time elapsed: \" + str(time_diff) + \" Seconds\") json2 =", "previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present)", "suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in range(len(json['address'])): address.send_keys(json['address'][i])", "while in_stock == 0: navigate_to_product() driver.implicitly_wait(0.3) y = check_if_in_stock() if not y: in_stock", "> button:nth-child(3)') 
update.click() print(\"changed zip code\") def click_checkout_key(): \"\"\" This function clicks the", "time.time() add_to_cart() in_stock = 1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in == \"sign-in\": sign_in_and_click_button()", "from random import randint from time import sleep from discord import Webhook, RequestsWebhookAdapter", "'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 3).until(element_present) except BaseException: return False return True def input_shipping_information(): \"\"\" This", "to product not being in stock\") sleep(randinteger) else: #print(\"Stock found - running script\")", "# Loads config file json = json.load(open('config.json', 'r')) webhook = Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter())", "they have been logged in with previous functions :rtype: object \"\"\" try: element_present", "Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code address", "NoSuchElementException from random import randint from time import sleep from discord import Webhook,", "This function selects the Checkout as Guest option on the page following the", "adds the product to cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR,", "driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip code\") zip_code_change = driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update", "# return True def check_if_shipping_info_needed(): \"\"\" This function checks to see if the", "checks to see if the bot needs to input the shipping information if", "cart page :rtype: object \"\"\" 
checkout_button = driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout started\") def", "manual user verification :rtype: object \"\"\" try: verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify", "as item has already been bought. \" \"To reset this please open purchased.txt", "#webhook.send(url + json['url']) time_start = time.time() add_to_cart() in_stock = 1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key()", "def main(guest_or_sign_in): time_start = 0 time_end = 0 if purchased.strip() == \"0\": in_stock", "been signed in using the previous functions :rtype: object \"\"\" try: element_present =", "function inputs the payment information of the user if they have selected Guest", "page to load\") add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to cart\") def navigate_to_cart():", "code for shipping, and then clicks update location. 
:rtype: object \"\"\" try: element_present", "with previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver,", "+ str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() elif guest_or_sign_in", "json['url']) time_start = time.time() add_to_cart() in_stock = 1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in", "driver = webdriver.Firefox( executable_path=json['executable_path']) # Creates WebDriver instance url = \"https://www.bestbuy.com\" timeout =", "driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\") lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions", "waiting for page to load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button(): \"\"\" This", "== \"0\": in_stock = 0 while in_stock == 0: navigate_to_product() driver.implicitly_wait(0.3) y =", "in with previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName'))", "the cart page, then types the correct zip code for shipping, and then", "driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\") password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password typed\") button = driver.find_element_by_css_selector(", "the shipping information if the user has been signed in using the previous", "clicks the Sign In button. 
:rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR,", "try: cvv = driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return False return True def click_continue_to_payment_info(): \"\"\"", "URL supplied, by default this is BestBuy.com \"\"\" driver.get(url) print(\"navigated to bestbuy\") def", "to payment information if the previous function returns False :rtype: object \"\"\" button", "This function inputs the payment information of the user if they have selected", "due to product not being in stock\") sleep(randinteger) else: #print(\"Stock found - running", "this please open purchased.txt and replace the 0 with a 1\") quit(0) main(guest_or_sign_in=json['bot_usage_case'])", "\"\"\" This function checks to see if the bot needs to input the", "return False else: return True except NoSuchElementException: return False # return True def", "= driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code address section typed\") def input_shipping_info_guest(): \"\"\" This", "load\") fname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\") lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"])", "def select_guest_checkout(): \"\"\" This function selects the Checkout as Guest option on the", "Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_xpath( 
'/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in range(len(json['zip_code'])):", "if the previous function returns False :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg')", "if they have selected Guest checkout :rtype: object \"\"\" try: element_present = EC.presence_of_element_located(", "element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for", "the BestBuy cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver,", "final button :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def main(guest_or_sign_in): time_start", "EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to", "range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click()", "for page to load\") email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\") password = driver.find_element_by_id(\"fld-p1\")", "= driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\" This function checks if the bot must", "has a card saved :rtype: object \"\"\" cvv = driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV added\")", "not find it, it means it is out of stock 
currently and it", "# Timeout for element loaded checks purchased = open('purchased.txt', 'r').read() def navigate_to_bb(): \"\"\"", "\"\"\" checkout_button = driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout started\") def select_guest_checkout(): \"\"\" This function", "stock :rtype: None Type \"\"\" try: not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException: return", "except TimeoutException: print(\"Timed out waiting for page to load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click()", "= EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page", "json # Loads config file json = json.load(open('config.json', 'r')) webhook = Webhook.from_url( json['discord_webook'],", "Loads config file json = json.load(open('config.json', 'r')) webhook = Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) #", "has been flagged for manual user verification :rtype: object \"\"\" try: verify =", "page :rtype: object \"\"\" checkout_button = driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout started\") def select_guest_checkout():", "= driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street address typed\") city =", "import By from selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException from random import", "print(\"Timed out waiting for page to load\") cc_number = driver.find_element_by_id( 
'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select", "selected\") select = Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV", "webhook = Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates webhook using discord url driver =", "\"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 3).until(element_present) except BaseException: return False", "TimeoutException: print(\"Timed out waiting for page to load\") zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER)", "\"\"\" This function inputs the shipping information that the user provides if they", "'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") fname", "been logged in during a previous function and has a card saved :rtype:", "to checkout as a guest :rtype: object \"\"\" fname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for", "logged in during a previous function and has a card saved :rtype: object", "zip_code.send_keys(json['zip_code']) print(\"zip code address section typed\") def input_shipping_info_guest(): \"\"\" This function inputs the", "does not find it, it means it is out of stock currently and", "json2.close() else: input_payment_info() # place_order time_end = time.time() time_diff = time_end - time_start", "a card saved :rtype: object \"\"\" cvv = driver.find_element_by_id('credit-card-cvv') 
cvv.send_keys(json['cvv']) print(\"CVV added\") def", "if the account has been flagged for manual user verification :rtype: object \"\"\"", "the product to cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)'))", "in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] + \" typed\") lname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i", "checking out as a guest :rtype: object \"\"\" email = driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone", "product not being in stock\") sleep(randinteger) else: #print(\"Stock found - running script\") #webhook.send(\"@everyone", "code address section typed\") def input_shipping_info_guest(): \"\"\" This function inputs the shipping information", "\"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] + \" typed\") lname = driver.find_element_by_xpath(", "import json # Loads config file json = json.load(open('config.json', 'r')) webhook = Webhook.from_url(", "object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed", "\"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out", "= EC.presence_of_element_located( 
(By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except TimeoutException: print(\"Timed out waiting for page", "user if they have selected Guest checkout :rtype: object \"\"\" try: element_present =", "EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to", "inputs the payment information of the user if they have selected Guest checkout", "phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\" This function checks if the bot must enter payment", "out waiting for page to load\") cc_number = driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select =", ":rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except TimeoutException:", "+ \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: if check_if_payment_info_on_page() is", "from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select from", "are checking out as a guest :rtype: object \"\"\" email = driver.find_element_by_id('user.emailAddress') email.send_keys(json['email'])", "= driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV added\") def input_payment_info_guest(): \"\"\" This function inputs the payment", "time_end = 0 if purchased.strip() == \"0\": in_stock = 0 while in_stock ==", "+ \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() elif guest_or_sign_in == \"guest\":", "user verification :rtype: object \"\"\" try: verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify Your", "try: element_present = EC.presence_of_element_located( 
(By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except TimeoutException: print(\"Timed out waiting", "'.btn-lg') button.click() def input_payment_info(): \"\"\" This function inputs the CVV if the user", "driver.get(url + \"/cart\") print(\"navigated to cart\") return driver.title def change_zip_code_and_select_shipping(): \"\"\" This function", "Guest option on the page following the BestBuy cart :rtype: object \"\"\" try:", "on the page following the BestBuy cart :rtype: object \"\"\" try: element_present =", "else: return True except NoSuchElementException: return False # return True def check_if_shipping_info_needed(): \"\"\"", "then clicks the Sign In button. :rtype: object \"\"\" try: element_present = EC.presence_of_element_located(", "None Type \"\"\" try: not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException: return False return", "to cart\") return driver.title def change_zip_code_and_select_shipping(): \"\"\" This function first selects the ZipCode", "driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button(): \"\"\" This function types the supplied email and password", "the previous function returns False :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click()", "time.time() time_diff = time_end - time_start webhook.send( \"@everyone Purchased, Time elapsed: \" +", "navigate_to_bb(): \"\"\" * Navigates to the URL supplied, by default this is BestBuy.com", "is out of stock currently and it throws a NoSuchElementException. 
:return: Returns True", "verification :rtype: object \"\"\" try: verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify Your Account\"", "= driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code address section typed\")", "= webdriver.Firefox( executable_path=json['executable_path']) # Creates WebDriver instance url = \"https://www.bestbuy.com\" timeout = 3", "if the bot must enter payment information on the current page :rtype: object", "object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except TimeoutException: print(\"Timed", "the shipping information that the user provides if they have been logged in", "address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city", "webdriver from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions", "the user if they have selected Guest checkout :rtype: object \"\"\" try: element_present", "for \" + str(randinteger) + \" seconds due to product not being in", "3).until(element_present) except BaseException: return False return True def input_shipping_information(): \"\"\" This function inputs", "0: navigate_to_product() driver.implicitly_wait(0.3) y = check_if_in_stock() if not y: in_stock = 0 randinteger", "'w') json2.write('1') json2.close() else: webhook.send( \"@everyone Not purchased 
as item has already been", "= check_if_in_stock() if not y: in_stock = 0 randinteger = randint(1, 5) print(", "update = driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)') update.click() print(\"changed zip code\") def click_checkout_key(): \"\"\"", "RequestsWebhookAdapter import json # Loads config file json = json.load(open('config.json', 'r')) webhook =", "object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except TimeoutException: print(\"Timed", "waiting for page to load\") zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip", "to the URL supplied, by default this is BestBuy.com \"\"\" driver.get(url) print(\"navigated to", "Not purchased as item has already been bought. \" \"To reset this please", "to cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present)", "\" seconds due to product not being in stock\") sleep(randinteger) else: #print(\"Stock found", "Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in", "add_to_cart() in_stock = 1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in == \"sign-in\": sign_in_and_click_button() if", "clicking the final button :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def", "and then clicks the Sign In button. 
:rtype: object \"\"\" try: element_present =", "to load\") email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\") password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password", "= driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed in\") def check_if_verify(): \"\"\" This function checks if", "waiting for page to load\") fname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\") lname", "= driver.find_element_by_css_selector( '.btn-lg') button.click() def input_payment_info(): \"\"\" This function inputs the CVV if", "\"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out", "zip code\") zip_code_change = driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)')", "to Cart button, and then adds the product to cart :rtype: object \"\"\"", "\"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i", "provided if they are checking out as a guest :rtype: object \"\"\" email", "out as a guest :rtype: object \"\"\" email = driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone =", "'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_id( 
'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code address section", "checkout_button.click() print(\"checkout started\") def select_guest_checkout(): \"\"\" This function selects the Checkout as Guest", "json = json.load(open('config.json', 'r')) webhook = Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates webhook using", "driver.get(url + json['url']) def check_if_in_stock(): \"\"\" This function tries to find the Add", "EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to", "json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates webhook using discord url driver = webdriver.Firefox( executable_path=json['executable_path']) #", "to load\") add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to cart\") def navigate_to_cart(): \"\"\"", "typed\") def input_shipping_info_guest(): \"\"\" This function inputs the shipping information that the user", "def change_zip_code_and_select_shipping(): \"\"\" This function first selects the ZipCode element on the cart", "True for in stock and False for not in stock :rtype: None Type", "print(\"clicked on zip code\") zip_code_change = driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector( '#item-availability-links", "password.send_keys(json['password']) print(\"password typed\") button = driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed in\") def check_if_verify(): \"\"\"", "+ the product URL \"\"\" driver.get(url + json['url']) def check_if_in_stock(): \"\"\" This function", "address = driver.find_element_by_xpath( 
\"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street address typed\") city", "cvv.send_keys(json['cvv']) print(\"CVV added\") def input_payment_info_guest(): \"\"\" This function inputs the payment information of", "click_continue_to_payment_info() input_payment_info_guest() # place_order() time_end = time.time() time_diff = time_end - time_start webhook.send(", "password and then clicks the Sign In button. :rtype: object \"\"\" try: element_present", "\" typed\") lname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\")", "= driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\") select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code", "the ZipCode element on the cart page, then types the correct zip code", "\" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: webhook.send( \"@everyone Not purchased", "EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select", "lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions", "\"\"\" * Navigates to the URL 
supplied, by default this is BestBuy.com \"\"\"", "selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select from selenium.common.exceptions", "clicks update location. :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver,", "navigates to the BestBuy cart page \"\"\" driver.get(url + \"/cart\") print(\"navigated to cart\")", "button = driver.find_element_by_css_selector( '.btn-lg') button.click() def input_payment_info(): \"\"\" This function inputs the CVV", "fname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\") lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname", "for i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\"", "as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import", "webhook.send( \"@everyone Not purchased as item has already been bought. 
\" \"To reset", "webhook using discord url driver = webdriver.Firefox( executable_path=json['executable_path']) # Creates WebDriver instance url", "current page :rtype: object \"\"\" try: cvv = driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return False", "for i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] + \" typed\") lname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\")", "of stock currently and it throws a NoSuchElementException. :return: Returns True for in", "on zip code\") zip_code_change = driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector( '#item-availability-links >", "the checkout button on the BestBuy cart page :rtype: object \"\"\" checkout_button =", "to load\") zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip code\") zip_code_change =", "element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for", "city.send_keys(json['city']) print(\"city typed\") select = Select(driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.state')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_id(", "config file json = json.load(open('config.json', 'r')) webhook = Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates", "have selected to checkout as a guest :rtype: object \"\"\" fname = driver.find_element_by_xpath(", ":rtype: object \"\"\" button = 
driver.find_element_by_css_selector( '.btn-lg') button.click() def input_payment_info(): \"\"\" This function", "= EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 3).until(element_present) except BaseException: return False return True def", "saved :rtype: object \"\"\" cvv = driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV added\") def input_payment_info_guest(): \"\"\"", "WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by", "json.load(open('config.json', 'r')) webhook = Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates webhook using discord url", "load\") email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\") password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password typed\")", "if the user has been signed in using the previous functions :rtype: object", "if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for", "object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed", ":rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException:", "zip code for shipping, and then clicks update location. 
:rtype: object \"\"\" try:", "= time.time() time_diff = time_end - time_start webhook.send( \"@everyone Purchased, Time elapsed: \"", "section typed\") def input_shipping_info_guest(): \"\"\" This function inputs the shipping information that the", "return True except NoSuchElementException: return False # return True def check_if_shipping_info_needed(): \"\"\" This", "load\") add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to cart\") def navigate_to_cart(): \"\"\" This", "* Navigates to the URL supplied + the product URL \"\"\" driver.get(url +", "shipping information if the user has been signed in using the previous functions", "driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\") def place_order(): \"\"\" This function places the order by", "removed\") address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city'])", "function and has a card saved :rtype: object \"\"\" cvv = driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv'])", "function first selects the ZipCode element on the cart page, then types the", "the final button :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def main(guest_or_sign_in):", "waiting for page to load\") cc_number = driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name(", "+ \" seconds due to product not being in stock\") sleep(randinteger) else: #print(\"Stock", "except BaseException: return False return True def input_shipping_information(): \"\"\" This function inputs the", "BestBuy.com \"\"\" driver.get(url) print(\"navigated to bestbuy\") def 
navigate_to_product(): \"\"\" * Navigates to the", "driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return False return True def click_continue_to_payment_info(): \"\"\" This function clicks", "selects the ZipCode element on the cart page, then types the correct zip", "(By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\")", "except NoSuchElementException: return False return True def click_continue_to_payment_info(): \"\"\" This function clicks the", "TimeoutException: print(\"Timed out waiting for page to load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def", "previous function and has a card saved :rtype: object \"\"\" cvv = driver.find_element_by_id('credit-card-cvv')", "open('purchased.txt', 'w') json2.write('1') json2.close() else: if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order()", "out waiting for page to load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button(): \"\"\"", "print(\"state selected\") zip_code = driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code address section typed\") def", "\"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out", "'/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_xpath( 
'/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i])", "checks purchased = open('purchased.txt', 'r').read() def navigate_to_bb(): \"\"\" * Navigates to the URL", "function clicks the continue to payment information if the previous function returns False", "elif guest_or_sign_in == \"guest\": select_guest_checkout() # driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() # place_order()", "logged in with previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID,", "for page to load\") zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip code\")", "selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException from random", "out waiting for page to load\") email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\") password", "= driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\")", "to load\") cc_number = driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month", "for manual user verification :rtype: object \"\"\" try: verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text if", "option on the page 
following the BestBuy cart :rtype: object \"\"\" try: element_present", "\"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except TimeoutException: print(\"Timed out", "This function checks to see if the bot needs to input the shipping", "continue to payment information if the previous function returns False :rtype: object \"\"\"", "function tries to find the Add To Cart button, if it does not", "correct zip code for shipping, and then clicks update location. :rtype: object \"\"\"", "page to load\") zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip code\") zip_code_change", "print(\"Timed out waiting for page to load\") email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\")", "1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in == \"sign-in\": sign_in_and_click_button() if not check_if_verify(): quit(0)", "has been signed in using the previous functions :rtype: object \"\"\" try: element_present", "\"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out", "In button. 
:rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present)", "time_start = 0 time_end = 0 if purchased.strip() == \"0\": in_stock = 0", "place_order() time_end = time.time() time_diff = time_end - time_start webhook.send( \"@everyone Purchased, Time", "zip_code = driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code address section", "False for not in stock :rtype: None Type \"\"\" try: not_sold_out = driver.find_element_by_css_selector(", "print(\"Timed out waiting for page to load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button():", "the payment information of the user if they have selected Guest checkout :rtype:", "= driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)') update.click() print(\"changed zip", "checkout :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except", "= time.time() add_to_cart() in_stock = 1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in == \"sign-in\":", "print(\"fname typed\") lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if", "\"\"\" driver.get(url + \"/cart\") print(\"navigated 
to cart\") return driver.title def change_zip_code_and_select_shipping(): \"\"\" This", "update.click() print(\"changed zip code\") def click_checkout_key(): \"\"\" This function clicks the checkout button", "the Add To Cart button, if it does not find it, it means", ":rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException:", "def click_continue_to_payment_info(): \"\"\" This function clicks the continue to payment information if the", "range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code address section typed\") def input_phone_and_email(): \"\"\" This function inputs", "loaded checks purchased = open('purchased.txt', 'r').read() def navigate_to_bb(): \"\"\" * Navigates to the", "code\") def click_checkout_key(): \"\"\" This function clicks the checkout button on the BestBuy", "the CVV if the user has been logged in during a previous function", "in stock :rtype: None Type \"\"\" try: not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException:", "been logged in with previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located(", "must enter payment information on the current page :rtype: object \"\"\" try: cvv", "# place_order() time_end = time.time() time_diff = time_end - time_start webhook.send( \"@everyone Purchased,", "for in stock and False for not in stock :rtype: None Type \"\"\"", "return False # return True def check_if_shipping_info_needed(): \"\"\" This function checks to see", "the user has been signed in using the previous functions :rtype: object \"\"\"", "address.send_keys(json['address']) print(\"street address typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\") select = 
Select(driver.find_element_by_id(", "phone number and email that the user has provided if they are checking", "function inputs the CVV if the user has been logged in during a", "has already been bought. \" \"To reset this please open purchased.txt and replace", "This function checks if the account has been flagged for manual user verification", "find the Add To Cart button, if it does not find it, it", "timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") add_to_cart_button = driver.find_element_by_css_selector(", "= driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\") lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname typed\")", "EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except TimeoutException: print(\"Timed out waiting for page to", "address section typed\") def input_shipping_info_guest(): \"\"\" This function inputs the shipping information that", "\"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\") select =", "not being in stock\") sleep(randinteger) else: #print(\"Stock found - running script\") #webhook.send(\"@everyone Stock", "+ str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: if", "discord url driver = webdriver.Firefox( executable_path=json['executable_path']) # Creates WebDriver instance url = \"https://www.bestbuy.com\"", "page to load\") cc_number = driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name( 'expiration-month')) 
select.select_by_visible_text(json['month'])", "as Guest option on the page following the BestBuy cart :rtype: object \"\"\"", "'r').read() def navigate_to_bb(): \"\"\" * Navigates to the URL supplied, by default this", "supplied + the product URL \"\"\" driver.get(url + json['url']) def check_if_in_stock(): \"\"\" This", "= driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify Your Account\" in verify: return False else: return", "driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code address section typed\") def", "if not check_if_verify(): quit(0) if check_if_shipping_info_needed() is True: input_shipping_information() if check_if_payment_info_on_page() is False:", "reset this please open purchased.txt and replace the 0 with a 1\") quit(0)", "False return True def click_continue_to_payment_info(): \"\"\" This function clicks the continue to payment", "This function tries to find the Add To Cart button, if it does", "print(\"city typed\") select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]')", "item has already been bought. 
\" \"To reset this please open purchased.txt and", "signed in using the previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located(", "update location. :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present)", "print(\"Timed out waiting for page to load\") zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked", "executable_path=json['executable_path']) # Creates WebDriver instance url = \"https://www.bestbuy.com\" timeout = 3 # Timeout", "json['url']) def check_if_in_stock(): \"\"\" This function tries to find the Add To Cart", "cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\") select = Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year'])", "'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") cc_number", "json2.close() else: if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end = time.time()", "for not in stock :rtype: None Type \"\"\" try: not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)')", "driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV added\") def input_payment_info_guest(): \"\"\" This function inputs the payment information", "it throws a NoSuchElementException. 
:return: Returns True for in stock and False for", "Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: webhook.send( \"@everyone Not purchased as", "to load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button(): \"\"\" This function types the", "time_end - time_start webhook.send( \"@everyone Purchased, Time elapsed: \" + str(time_diff) + \"", "email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\") password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password typed\") button", "user provides if they have been logged in with previous functions :rtype: object", "added\") def input_payment_info_guest(): \"\"\" This function inputs the payment information of the user", "input_phone_and_email(): \"\"\" This function inputs the phone number and email that the user", "guest_or_sign_in == \"guest\": select_guest_checkout() # driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() # place_order() time_end", "typed\") def input_phone_and_email(): \"\"\" This function inputs the phone number and email that", "input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest() # place_order() time_end = time.time() time_diff = time_end - time_start", "json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: input_payment_info() # place_order time_end = time.time()", "on the current page :rtype: object \"\"\" try: cvv = driver.find_element_by_id('credit-card-cvv') except NoSuchElementException:", "to find the Add To Cart button, if it does not find it,", "function navigates to the BestBuy cart page \"\"\" driver.get(url + \"/cart\") print(\"navigated to", "element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 
3).until(element_present) except BaseException: return False return True", "information that the user provides if they have been logged in with previous", "product URL \"\"\" driver.get(url + json['url']) def check_if_in_stock(): \"\"\" This function tries to", "in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code address section typed\") def input_phone_and_email(): \"\"\" This function", "zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip code\") zip_code_change = driver.find_element_by_css_selector( \"#location\")", "purchased = open('purchased.txt', 'r').read() def navigate_to_bb(): \"\"\" * Navigates to the URL supplied,", "= 0 while in_stock == 0: navigate_to_product() driver.implicitly_wait(0.3) y = check_if_in_stock() if not", "time import sleep from discord import Webhook, RequestsWebhookAdapter import json # Loads config", "types the correct zip code for shipping, and then clicks update location. :rtype:", "page :rtype: object \"\"\" try: cvv = driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return False return", "import randint from time import sleep from discord import Webhook, RequestsWebhookAdapter import json", "cart page, then types the correct zip code for shipping, and then clicks", "\"Verify Your Account\" in verify: return False else: return True except NoSuchElementException: return", "adapter=RequestsWebhookAdapter()) # Creates webhook using discord url driver = webdriver.Firefox( executable_path=json['executable_path']) # Creates", "email and password and then clicks the Sign In button. 
:rtype: object \"\"\"", "check_if_shipping_info_needed(): \"\"\" This function checks to see if the bot needs to input", "\" \"To reset this please open purchased.txt and replace the 0 with a", "have been logged in with previous functions :rtype: object \"\"\" try: element_present =", "select.select_by_visible_text(json['month']) print(\"month selected\") select = Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv')", "Checkout as Guest option on the page following the BestBuy cart :rtype: object", "object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def main(guest_or_sign_in): time_start = 0 time_end", "selected\") zip_code = driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code address section typed\") def input_shipping_info_guest():", "* Navigates to the URL supplied, by default this is BestBuy.com \"\"\" driver.get(url)", "if they are checking out as a guest :rtype: object \"\"\" email =", "functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except", "driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\")", "= EC.presence_of_element_located( (By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page", "'.btn-lg') button.click() def main(guest_or_sign_in): time_start = 0 time_end = 0 if purchased.strip() ==", "str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') 
json2.write('1') json2.close() else: if check_if_payment_info_on_page()", "+ \"/cart\") print(\"navigated to cart\") return driver.title def change_zip_code_and_select_shipping(): \"\"\" This function first", "button on the BestBuy cart page :rtype: object \"\"\" checkout_button = driver.find_element_by_css_selector( \".btn-lg\")", "\"\"\" driver.get(url) print(\"navigated to bestbuy\") def navigate_to_product(): \"\"\" * Navigates to the URL", "selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException", "\"\"\" This function navigates to the BestBuy cart page \"\"\" driver.get(url + \"/cart\")", "def place_order(): \"\"\" This function places the order by clicking the final button", "This function clicks the continue to payment information if the previous function returns", "if they have selected to checkout as a guest :rtype: object \"\"\" fname", "page following the BestBuy cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR,", "driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)') update.click() print(\"changed zip code\") def click_checkout_key(): \"\"\" This function", "import TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import Select from selenium.common.exceptions import", "in\") def check_if_verify(): \"\"\" This function checks if the account has been flagged", "randint(1, 5) print( \"Sleeping for \" + str(randinteger) + \" seconds due to", "open('purchased.txt', 'w') json2.write('1') json2.close() else: input_payment_info() # place_order time_end = time.time() time_diff =", "\"\"\" This function checks if the account has been flagged for manual user", "time_start = time.time() add_to_cart() in_stock = 1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in ==", 
"\"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\") lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions =", "driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text:", "the user has been logged in during a previous function and has a", "is BestBuy.com \"\"\" driver.get(url) print(\"navigated to bestbuy\") def navigate_to_product(): \"\"\" * Navigates to", "driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\" This function checks if the bot must enter", "except TimeoutException: print(\"Timed out waiting for page to load\") add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\")", "waiting for page to load\") add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to cart\")", "zip_code_change = driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)') update.click() print(\"changed", "user has been signed in using the previous functions :rtype: object \"\"\" try:", "= driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_xpath(", "'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\") select = Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\") cvv =", "= 
open('purchased.txt', 'r').read() def navigate_to_bb(): \"\"\" * Navigates to the URL supplied, by", "10).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") zip_code_click = driver.find_element_by_css_selector(", "inputs the shipping information that the user provides if they have selected to", "load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button(): \"\"\" This function types the supplied", "json2.write('1') json2.close() else: webhook.send( \"@everyone Not purchased as item has already been bought.", "city.send_keys(json['city'][i]) print(\"city typed\") select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_xpath(", "return True def click_continue_to_payment_info(): \"\"\" This function clicks the continue to payment information", "\"\"\" This function inputs the payment information of the user if they have", "randinteger = randint(1, 5) print( \"Sleeping for \" + str(randinteger) + \" seconds", ":rtype: object \"\"\" cvv = driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV added\") def input_payment_info_guest(): \"\"\" This", "True: input_shipping_information() if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end = time.time()", "time_diff = time_end - time_start webhook.send( \"@everyone Purchased, Time elapsed: \" + str(time_diff)", "click_continue_to_payment_info() input_payment_info() # place_order() time_end = time.time() time_diff = time_end - time_start webhook.send(", "if \"Hide Suggestions\" in suggestions.text: 
suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address'])", "\"/cart\") print(\"navigated to cart\") return driver.title def change_zip_code_and_select_shipping(): \"\"\" This function first selects", "print(\"Timed out waiting for page to load\") add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added", "click_continue_to_payment_info(): \"\"\" This function clicks the continue to payment information if the previous", "select.select_by_visible_text(json['year']) print(\"year selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\") def place_order(): \"\"\" This", "flagged for manual user verification :rtype: object \"\"\" try: verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text", "function checks to see if the bot needs to input the shipping information", "def click_checkout_key(): \"\"\" This function clicks the checkout button on the BestBuy cart", "'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\") select = Select(driver.find_element_by_name( 'expiration-year'))", "instance url = \"https://www.bestbuy.com\" timeout = 3 # Timeout for element loaded checks", "return False return True def add_to_cart(): \"\"\" This function finds the Add to", "str(randinteger) + \" seconds due to product not being in stock\") sleep(randinteger) else:", "in_stock == 0: navigate_to_product() driver.implicitly_wait(0.3) y = check_if_in_stock() if not y: in_stock =", "object \"\"\" checkout_button = driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout started\") def select_guest_checkout(): \"\"\" This", "= 0 randinteger = 
randint(1, 5) print( \"Sleeping for \" + str(randinteger) +", "returns False :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def input_payment_info(): \"\"\"", "WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") cc_number =", "clicks the continue to payment information if the previous function returns False :rtype:", "typed\") lname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions", "in stock\") sleep(randinteger) else: #print(\"Stock found - running script\") #webhook.send(\"@everyone Stock Found\") #webhook.send(url", "else: if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end = time.time() time_diff", "= driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return False return True def click_continue_to_payment_info(): \"\"\" This function", "user provides if they have selected to checkout as a guest :rtype: object", "already been bought. 
\" \"To reset this please open purchased.txt and replace the", "url driver = webdriver.Firefox( executable_path=json['executable_path']) # Creates WebDriver instance url = \"https://www.bestbuy.com\" timeout", "the previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver,", "try: element_present = EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting", "check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end = time.time() time_diff = time_end", "bestbuy\") def navigate_to_product(): \"\"\" * Navigates to the URL supplied + the product", "that the user provides if they have selected to checkout as a guest", "input_payment_info() # place_order time_end = time.time() time_diff = time_end - time_start webhook.send( \"@everyone", "Sign In button. 
:rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver,", "This function checks if the bot must enter payment information on the current", "button:nth-child(3)') update.click() print(\"changed zip code\") def click_checkout_key(): \"\"\" This function clicks the checkout", "+ \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: input_payment_info() # place_order", "= \"https://www.bestbuy.com\" timeout = 3 # Timeout for element loaded checks purchased =", "navigate_to_cart(): \"\"\" This function navigates to the BestBuy cart page \"\"\" driver.get(url +", "print(\"navigated to cart\") return driver.title def change_zip_code_and_select_shipping(): \"\"\" This function first selects the", "driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\") select = Select(driver.find_element_by_name(", "code\") zip_code_change = driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)') update.click()", "return False return True def input_shipping_information(): \"\"\" This function inputs the shipping information", "= Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\") select = Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\")", "fname.send_keys(json['first_name'][i]) print(json['first_name'] + \" typed\") lname = driver.find_element_by_xpath( 
\"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in range(len(json['last_name'])):", "for page to load\") cc_number = driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name( 'expiration-month'))", "typed\") select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for", "print(\"zip code address section typed\") def input_phone_and_email(): \"\"\" This function inputs the phone", "during a previous function and has a card saved :rtype: object \"\"\" cvv", "\"sign-in\": sign_in_and_click_button() if not check_if_verify(): quit(0) if check_if_shipping_info_needed() is True: input_shipping_information() if check_if_payment_info_on_page()", "try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 3).until(element_present) except BaseException: return False return", "navigate_to_product() driver.implicitly_wait(0.3) y = check_if_in_stock() if not y: in_stock = 0 randinteger =", "def check_if_shipping_info_needed(): \"\"\" This function checks to see if the bot needs to", "print(\"zip code address section typed\") def input_shipping_info_guest(): \"\"\" This function inputs the shipping", 
"TimeoutException: print(\"Timed out waiting for page to load\") email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email", "input_payment_info(): \"\"\" This function inputs the CVV if the user has been logged", "0 time_end = 0 if purchased.strip() == \"0\": in_stock = 0 while in_stock", "range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] + \" typed\") lname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in", "load\") zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip code\") zip_code_change = driver.find_element_by_css_selector(", "True def check_if_shipping_info_needed(): \"\"\" This function checks to see if the bot needs", "they have selected to checkout as a guest :rtype: object \"\"\" fname =", ":rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except TimeoutException:", "\"@everyone Not purchased as item has already been bought. 
\" \"To reset this", "places the order by clicking the final button :rtype: object \"\"\" button =", "'.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") zip_code_click", ":return: Returns True for in stock and False for not in stock :rtype:", "\"\"\" cvv = driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV added\") def input_payment_info_guest(): \"\"\" This function inputs", "fname.send_keys(json['first_name']) print(\"fname typed\") lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\")", "select = Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\") select = Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year", "try: verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify Your Account\" in verify: return False", "Time elapsed: \" + str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1')", "types the supplied email and password and then clicks the Sign In button.", "'.cia-form__controls__submit') button.click() print(\"signed in\") def check_if_verify(): \"\"\" This function checks if the account", "object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 3).until(element_present) except BaseException: return", "json2.write('1') json2.close() elif guest_or_sign_in == \"guest\": select_guest_checkout() # driver.refresh() input_shipping_info_guest() input_phone_and_email() click_continue_to_payment_info() input_payment_info_guest()", "supplied email and password and then clicks the Sign In button. 
:rtype: object", "print(\"suggestions removed\") address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\")", "change_zip_code_and_select_shipping(): \"\"\" This function first selects the ZipCode element on the cart page,", "timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") cc_number = driver.find_element_by_id(", "\"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except TimeoutException: print(\"Timed out", "'w') json2.write('1') json2.close() else: input_payment_info() # place_order time_end = time.time() time_diff = time_end", "on the cart page, then types the correct zip code for shipping, and", "False return True def add_to_cart(): \"\"\" This function finds the Add to Cart", "except TimeoutException: print(\"Timed out waiting for page to load\") cc_number = driver.find_element_by_id( 'optimized-cc-card-number')", "print(\"added to cart\") def navigate_to_cart(): \"\"\" This function navigates to the BestBuy cart", "timeout = 3 # Timeout for element loaded checks purchased = open('purchased.txt', 'r').read()", "\"\"\" This function clicks the checkout button on the BestBuy cart page :rtype:", "select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_xpath( 
'/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i", "account has been flagged for manual user verification :rtype: object \"\"\" try: verify", "input_payment_info_guest(): \"\"\" This function inputs the payment information of the user if they", "= driver.find_element_by_id( 'optimized-cc-card-number') cc_number.send_keys(json['cc_number']) select = Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\") select =", "= driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException: return False return True def add_to_cart(): \"\"\" This", "This function navigates to the BestBuy cart page \"\"\" driver.get(url + \"/cart\") print(\"navigated", "function returns False :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def input_payment_info():", "\".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip code\") zip_code_change = driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update =", "add_to_cart_button.click() print(\"added to cart\") def navigate_to_cart(): \"\"\" This function navigates to the BestBuy", "select.select_by_visible_text(json['state']) print(\"state selected\") zip_code = driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code address section typed\")", "\" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: input_payment_info() # place_order time_end", "= driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)') update.click() print(\"changed zip code\") def click_checkout_key(): \"\"\" This", "expected_conditions as 
EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium.webdriver.support.ui", "import time from selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import", "+ json['url']) time_start = time.time() add_to_cart() in_stock = 1 navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if", "supplied, by default this is BestBuy.com \"\"\" driver.get(url) print(\"navigated to bestbuy\") def navigate_to_product():", "+ \" typed\") lname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname", "have selected Guest checkout :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number'))", "WebDriverWait(driver, 9).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") guest =", "in stock and False for not in stock :rtype: None Type \"\"\" try:", "'#item-availability-links > button:nth-child(3)') update.click() print(\"changed zip code\") def click_checkout_key(): \"\"\" This function clicks", "order by clicking the final button :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg')", "not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException: return False return True def add_to_cart(): \"\"\"", "not in stock :rtype: None Type \"\"\" try: not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except", "button :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click() def main(guest_or_sign_in): time_start =", "the user 
provides if they have been logged in with previous functions :rtype:", "object \"\"\" cvv = driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV added\") def input_payment_info_guest(): \"\"\" This function", "provides if they have been logged in with previous functions :rtype: object \"\"\"", "guest.click() def sign_in_and_click_button(): \"\"\" This function types the supplied email and password and", "Guest checkout :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present)", "if it does not find it, it means it is out of stock", "\"\"\" This function first selects the ZipCode element on the cart page, then", "select_guest_checkout(): \"\"\" This function selects the Checkout as Guest option on the page", "(By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\")", "print(\"state selected\") zip_code = driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[6]/div[1]/div[1]/label[1]/div[1]/input[1]') for i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code", "Returns True for in stock and False for not in stock :rtype: None", "function places the order by clicking the final button :rtype: object \"\"\" button", "Navigates to the URL supplied, by default this is BestBuy.com \"\"\" driver.get(url) print(\"navigated", "input the shipping information if the user has been signed in using the", "button = driver.find_element_by_css_selector( '.btn-lg') button.click() def main(guest_or_sign_in): time_start = 0 time_end = 0", "driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password 
typed\") button = driver.find_element_by_css_selector( '.cia-form__controls__submit') button.click() print(\"signed in\") def check_if_verify():", "from time import sleep from discord import Webhook, RequestsWebhookAdapter import json # Loads", "This function clicks the checkout button on the BestBuy cart page :rtype: object", "the BestBuy cart page \"\"\" driver.get(url + \"/cart\") print(\"navigated to cart\") return driver.title", "inputs the CVV if the user has been logged in during a previous", "from selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException from random import randint from", "\" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() elif guest_or_sign_in == \"guest\": select_guest_checkout()", "= EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page", "try: not_sold_out = driver.find_element_by_css_selector( 'button.btn-primary:nth-child(1)') except NoSuchElementException: return False return True def add_to_cart():", "else: input_payment_info() # place_order time_end = time.time() time_diff = time_end - time_start webhook.send(", "navigate_to_cart() change_zip_code_and_select_shipping() click_checkout_key() if guest_or_sign_in == \"sign-in\": sign_in_and_click_button() if not check_if_verify(): quit(0) if", "print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\")", "typed\") lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide", "payment information on the current page :rtype: object \"\"\" try: cvv = 
driver.find_element_by_id('credit-card-cvv')", "= driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button(): \"\"\" This function types the supplied email and", "import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from", "suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street", "= Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\") def", "(By.CSS_SELECTOR, 'button.btn-primary:nth-child(1)')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\")", "range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\") select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code =", "\"To reset this please open purchased.txt and replace the 0 with a 1\")", "to load\") fname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\") lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\")", "\"\"\" email = 
driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone = driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\" This", "False # return True def check_if_shipping_info_needed(): \"\"\" This function checks to see if", "to bestbuy\") def navigate_to_product(): \"\"\" * Navigates to the URL supplied + the", "cart :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except", "import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException from", "in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text:", "input_payment_info() # place_order() time_end = time.time() time_diff = time_end - time_start webhook.send( \"@everyone", "for page to load\") add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to cart\") def", "- time_start webhook.send( \"@everyone Purchased, Time elapsed: \" + str(time_diff) + \" Seconds\")", "\" + str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() elif", "\".btn-lg\") checkout_button.click() print(\"checkout started\") def select_guest_checkout(): \"\"\" This function selects the Checkout as", "EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except TimeoutException: print(\"Timed out waiting for page to", "return driver.title def change_zip_code_and_select_shipping(): \"\"\" This function first selects the ZipCode element on", "driver.find_element_by_css_selector( '.btn-lg') button.click() def 
input_payment_info(): \"\"\" This function inputs the CVV if the", "not y: in_stock = 0 randinteger = randint(1, 5) print( \"Sleeping for \"", "driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address typed\") city = driver.find_element_by_id(\"consolidatedAddresses.ui_address_2.city\") city.send_keys(json['city']) print(\"city typed\") select", "json2 = open('purchased.txt', 'w') json2.write('1') json2.close() elif guest_or_sign_in == \"guest\": select_guest_checkout() # driver.refresh()", "input_payment_info_guest() # place_order() time_end = time.time() time_diff = time_end - time_start webhook.send( \"@everyone", "out waiting for page to load\") add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to", "add_to_cart_button = driver.find_element_by_css_selector( \"button.btn-primary:nth-child(1)\") add_to_cart_button.click() print(\"added to cart\") def navigate_to_cart(): \"\"\" This function", "the URL supplied + the product URL \"\"\" driver.get(url + json['url']) def check_if_in_stock():", "number and email that the user has provided if they are checking out", "Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: if check_if_payment_info_on_page() is False: click_continue_to_payment_info()", "else: webhook.send( \"@everyone Not purchased as item has already been bought. 
\" \"To", "cart page \"\"\" driver.get(url + \"/cart\") print(\"navigated to cart\") return driver.title def change_zip_code_and_select_shipping():", "the current page :rtype: object \"\"\" try: cvv = driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return", "NoSuchElementException: return False return True def click_continue_to_payment_info(): \"\"\" This function clicks the continue", "function finds the Add to Cart button, and then adds the product to", "for shipping, and then clicks update location. :rtype: object \"\"\" try: element_present =", "\"\"\" This function selects the Checkout as Guest option on the page following", "= json.load(open('config.json', 'r')) webhook = Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates webhook using discord", "\"\"\" driver.get(url + json['url']) def check_if_in_stock(): \"\"\" This function tries to find the", "information that the user provides if they have selected to checkout as a", "file json = json.load(open('config.json', 'r')) webhook = Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates webhook", "print(\"checkout started\") def select_guest_checkout(): \"\"\" This function selects the Checkout as Guest option", "object \"\"\" fname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] +", "by clicking the final button :rtype: object \"\"\" button = driver.find_element_by_css_selector( '.btn-lg') button.click()", "out waiting for page to load\") zip_code_click = driver.find_element_by_css_selector( \".change-zipcode-link\") zip_code_click.send_keys(Keys.ENTER) print(\"clicked on", "Seconds\") json2 = open('purchased.txt', 'w') 
json2.write('1') json2.close() else: input_payment_info() # place_order time_end =", "zip code\") def click_checkout_key(): \"\"\" This function clicks the checkout button on the", "print(\"CVV added\") def input_payment_info_guest(): \"\"\" This function inputs the payment information of the", "element on the cart page, then types the correct zip code for shipping,", ":rtype: object \"\"\" fname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name']", "cvv = driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return False return True def click_continue_to_payment_info(): \"\"\" This", "object \"\"\" email = driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone = driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\"", "Creates webhook using discord url driver = webdriver.Firefox( executable_path=json['executable_path']) # Creates WebDriver instance", "zip_code = driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code address section typed\") def input_shipping_info_guest(): \"\"\"", ":rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'optimized-cc-card-number')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException:", "timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") fname = driver.find_element_by_id(", "selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By", "in_stock = 0 
randinteger = randint(1, 5) print( \"Sleeping for \" + str(randinteger)", "#print(\"Stock found - running script\") #webhook.send(\"@everyone Stock Found\") #webhook.send(url + json['url']) time_start =", "for page to load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button(): \"\"\" This function", "except TimeoutException: print(\"Timed out waiting for page to load\") email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email'])", "True except NoSuchElementException: return False # return True def check_if_shipping_info_needed(): \"\"\" This function", "= driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\")", "'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\") cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\") def place_order(): \"\"\"", "import expected_conditions as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from", "zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector( '#item-availability-links > button:nth-child(3)') update.click() print(\"changed zip code\") def click_checkout_key():", "BaseException: return False return True def input_shipping_information(): \"\"\" This function inputs the shipping", "and then adds the product to cart :rtype: object \"\"\" try: element_present =", "to the URL supplied + the product URL \"\"\" driver.get(url + json['url']) def", "import Webhook, RequestsWebhookAdapter import json # Loads 
config file json = json.load(open('config.json', 'r'))", "bot must enter payment information on the current page :rtype: object \"\"\" try:", "guest :rtype: object \"\"\" email = driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone = driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def", "function checks if the account has been flagged for manual user verification :rtype:", "check_if_verify(): quit(0) if check_if_shipping_info_needed() is True: input_shipping_information() if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info()", "this is BestBuy.com \"\"\" driver.get(url) print(\"navigated to bestbuy\") def navigate_to_product(): \"\"\" * Navigates", "the Checkout as Guest option on the page following the BestBuy cart :rtype:", "= driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\") password = driver.find_element_by_id(\"fld-p1\") password.send_keys(json['password']) print(\"password typed\") button =", "WebDriverWait(driver, 10).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") zip_code_click =", "the Sign In button. 
:rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit'))", "print(\"suggestions removed\") address = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street address", "as a guest :rtype: object \"\"\" email = driver.find_element_by_id('user.emailAddress') email.send_keys(json['email']) phone = driver.find_element_by_id('user.phone')", "in during a previous function and has a card saved :rtype: object \"\"\"", "\"Sleeping for \" + str(randinteger) + \" seconds due to product not being", "try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting", "the BestBuy cart page :rtype: object \"\"\" checkout_button = driver.find_element_by_css_selector( \".btn-lg\") checkout_button.click() print(\"checkout", "as a guest :rtype: object \"\"\" fname = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in", "in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\") select = Select(driver.find_element_by_xpath( '/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[2]/label[1]/div[1]/div[1]/select[1]')) select.select_by_visible_text(json['state']) print(\"state selected\") zip_code", "fname = driver.find_element_by_xpath( 
\"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['first_name'])): fname.send_keys(json['first_name'][i]) print(json['first_name'] + \" typed\")", "# place_order time_end = time.time() time_diff = time_end - time_start webhook.send( \"@everyone Purchased,", "phone = driver.find_element_by_id('user.phone') phone.send_keys(json['phone']) def check_if_payment_info_on_page(): \"\"\" This function checks if the bot", "previous functions :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 3).until(element_present)", "= time_end - time_start webhook.send( \"@everyone Purchased, Time elapsed: \" + str(time_diff) +", "= driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['city'])): city.send_keys(json['city'][i]) print(\"city typed\") select = Select(driver.find_element_by_xpath(", "'.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\") guest", "i in range(len(json['address'])): address.send_keys(json['address'][i]) print(\"street address typed\") city = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i", "a NoSuchElementException. 
:return: Returns True for in stock and False for not in", "driver.find_element_by_id( 'consolidatedAddresses.ui_address_2.zipcode') zip_code.send_keys(json['zip_code']) print(\"zip code address section typed\") def input_shipping_info_guest(): \"\"\" This function", "sleep(randinteger) else: #print(\"Stock found - running script\") #webhook.send(\"@everyone Stock Found\") #webhook.send(url + json['url'])", "def navigate_to_bb(): \"\"\" * Navigates to the URL supplied, by default this is", "and False for not in stock :rtype: None Type \"\"\" try: not_sold_out =", "if check_if_shipping_info_needed() is True: input_shipping_information() if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order()", "CVV if the user has been logged in during a previous function and", "the shipping information that the user provides if they have selected to checkout", "driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\")", "time from selenium import webdriver from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait", "Webhook, RequestsWebhookAdapter import json # Loads config file json = json.load(open('config.json', 'r')) webhook", "Webhook.from_url( json['discord_webook'], adapter=RequestsWebhookAdapter()) # Creates webhook using discord url driver = webdriver.Firefox( executable_path=json['executable_path'])", "time_start webhook.send( \"@everyone Purchased, Time elapsed: \" + str(time_diff) + \" Seconds\") json2", "json2.write('1') json2.close() else: input_payment_info() # place_order time_end = time.time() time_diff 
= time_end -", "selenium.common.exceptions import NoSuchElementException from random import randint from time import sleep from discord", "def check_if_verify(): \"\"\" This function checks if the account has been flagged for", "typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address", "the continue to payment information if the previous function returns False :rtype: object", "running script\") #webhook.send(\"@everyone Stock Found\") #webhook.send(url + json['url']) time_start = time.time() add_to_cart() in_stock", "not check_if_verify(): quit(0) if check_if_shipping_info_needed() is True: input_shipping_information() if check_if_payment_info_on_page() is False: click_continue_to_payment_info()", "it is out of stock currently and it throws a NoSuchElementException. :return: Returns", "from selenium.common.exceptions import NoSuchElementException from random import randint from time import sleep from", "By from selenium.webdriver.support.ui import Select from selenium.common.exceptions import NoSuchElementException from random import randint", "(By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, 3).until(element_present) except BaseException: return False return True def input_shipping_information(): \"\"\"", "\"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[2]/label[1]/div[1]/input[1]\") for i in range(len(json['last_name'])): lname.send_keys(json[\"last_name\"][i]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide", "page to load\") guest = driver.find_element_by_css_selector('.cia-guest-content__continue') guest.click() def sign_in_and_click_button(): \"\"\" This function types", 
"driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify Your Account\" in verify: return False else: return True", "try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-guest-content__continue')) WebDriverWait(driver, 9).until(element_present) except TimeoutException: print(\"Timed out waiting", "This function finds the Add to Cart button, and then adds the product", "try: element_present = EC.presence_of_element_located( (By.ID, 'consolidatedAddresses.ui_address_2.firstName')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting", "function checks if the bot must enter payment information on the current page", "payment information of the user if they have selected Guest checkout :rtype: object", "Select(driver.find_element_by_name( 'expiration-month')) select.select_by_visible_text(json['month']) print(\"month selected\") select = Select(driver.find_element_by_name( 'expiration-year')) select.select_by_visible_text(json['year']) print(\"year selected\") cvv", "stock\") sleep(randinteger) else: #print(\"Stock found - running script\") #webhook.send(\"@everyone Stock Found\") #webhook.send(url +", "json2.write('1') json2.close() else: if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end =", "started\") def select_guest_checkout(): \"\"\" This function selects the Checkout as Guest option on", "card saved :rtype: object \"\"\" cvv = driver.find_element_by_id('credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV added\") def input_payment_info_guest():", "= driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\" in", "lname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.lastName\") 
lname.send_keys(json[\"last_name\"]) print(\"lname typed\") suggestions = driver.find_element_by_css_selector(\".autocomplete__toggle\") if \"Hide Suggestions\"", "str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: input_payment_info() #", "str(time_diff) + \" Seconds\") json2 = open('purchased.txt', 'w') json2.write('1') json2.close() else: webhook.send( \"@everyone", "random import randint from time import sleep from discord import Webhook, RequestsWebhookAdapter import", "the supplied email and password and then clicks the Sign In button. :rtype:", "from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as", "def navigate_to_product(): \"\"\" * Navigates to the URL supplied + the product URL", "out waiting for page to load\") fname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\") fname.send_keys(json['first_name']) print(\"fname typed\")", "in_stock = 0 while in_stock == 0: navigate_to_product() driver.implicitly_wait(0.3) y = check_if_in_stock() if", "input_shipping_information() if check_if_payment_info_on_page() is False: click_continue_to_payment_info() input_payment_info() # place_order() time_end = time.time() time_diff", "they are checking out as a guest :rtype: object \"\"\" email = driver.find_element_by_id('user.emailAddress')", "verify = driver.find_element_by_css_selector( 'h1.cia-section-title').text if \"Verify Your Account\" in verify: return False else:", "cvv = driver.find_element_by_css_selector('#credit-card-cvv') cvv.send_keys(json['cvv']) print(\"CVV typed\") def place_order(): \"\"\" This function places the", "(By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\")", "else: #print(\"Stock found - running script\") #webhook.send(\"@everyone Stock 
Found\") #webhook.send(url + json['url']) time_start", "sign_in_and_click_button(): \"\"\" This function types the supplied email and password and then clicks", "object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed", "return False return True def click_continue_to_payment_info(): \"\"\" This function clicks the continue to", "button. :rtype: object \"\"\" try: element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except", "it, it means it is out of stock currently and it throws a", "the product URL \"\"\" driver.get(url + json['url']) def check_if_in_stock(): \"\"\" This function tries", "the Add to Cart button, and then adds the product to cart :rtype:", "address section typed\") def input_phone_and_email(): \"\"\" This function inputs the phone number and", "def input_shipping_info_guest(): \"\"\" This function inputs the shipping information that the user provides", "discord import Webhook, RequestsWebhookAdapter import json # Loads config file json = json.load(open('config.json',", "waiting for page to load\") email = driver.find_element_by_id(\"fld-e\") email.send_keys(json['email']) print(\"email typed\") password =", "shipping information that the user provides if they have selected to checkout as", "using discord url driver = webdriver.Firefox( executable_path=json['executable_path']) # Creates WebDriver instance url =", "except NoSuchElementException: return False # return True def check_if_shipping_info_needed(): \"\"\" This function checks", "if not y: in_stock = 0 randinteger = randint(1, 5) print( \"Sleeping for", ":rtype: object \"\"\" try: cvv = driver.find_element_by_id('credit-card-cvv') except NoSuchElementException: return False return True", "being in stock\") sleep(randinteger) else: 
#print(\"Stock found - running script\") #webhook.send(\"@everyone Stock Found\")", "To Cart button, if it does not find it, it means it is", "stock currently and it throws a NoSuchElementException. :return: Returns True for in stock", "(By.CSS_SELECTOR, '.cia-form__controls__submit')) WebDriverWait(driver, timeout).until(element_present) except TimeoutException: print(\"Timed out waiting for page to load\")", "check_if_payment_info_on_page(): \"\"\" This function checks if the bot must enter payment information on", "time_end = time.time() time_diff = time_end - time_start webhook.send( \"@everyone Purchased, Time elapsed:", "shipping, and then clicks update location. :rtype: object \"\"\" try: element_present = EC.presence_of_element_located(", "provides if they have selected to checkout as a guest :rtype: object \"\"\"", "True def add_to_cart(): \"\"\" This function finds the Add to Cart button, and", "y: in_stock = 0 randinteger = randint(1, 5) print( \"Sleeping for \" +", "except TimeoutException: print(\"Timed out waiting for page to load\") fname = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.firstName\")", "guest_or_sign_in == \"sign-in\": sign_in_and_click_button() if not check_if_verify(): quit(0) if check_if_shipping_info_needed() is True: input_shipping_information()", "the account has been flagged for manual user verification :rtype: object \"\"\" try:", "stock and False for not in stock :rtype: None Type \"\"\" try: not_sold_out", "def input_payment_info(): \"\"\" This function inputs the CVV if the user has been", "Suggestions\" in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_id( \"consolidatedAddresses.ui_address_2.street\") address.send_keys(json['address']) print(\"street address", "#webhook.send(\"@everyone Stock Found\") #webhook.send(url + json['url']) time_start = time.time() add_to_cart() in_stock = 1", "finds the Add to Cart button, and then adds the product to 
cart", "element_present = EC.presence_of_element_located( (By.CSS_SELECTOR, '.change-zipcode-link')) WebDriverWait(driver, 10).until(element_present) except TimeoutException: print(\"Timed out waiting for", "zip_code_click.send_keys(Keys.ENTER) print(\"clicked on zip code\") zip_code_change = driver.find_element_by_css_selector( \"#location\") zip_code_change.send_keys(json['zip_code']) update = driver.find_element_by_css_selector(", "i in range(len(json['zip_code'])): zip_code.send_keys(json['zip_code'][i]) print(\"zip code address section typed\") def input_phone_and_email(): \"\"\" This", "cart\") return driver.title def change_zip_code_and_select_shipping(): \"\"\" This function first selects the ZipCode element", "has been logged in during a previous function and has a card saved", "= 0 if purchased.strip() == \"0\": in_stock = 0 while in_stock == 0:", "address.send_keys(json['address'][i]) print(\"street address typed\") city = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[5]/div[1]/div[1]/label[1]/div[1]/input[1]\") for i in range(len(json['city'])): city.send_keys(json['city'][i])", "in suggestions.text: suggestions.click() print(\"suggestions removed\") address = driver.find_element_by_xpath( \"/html[1]/body[1]/div[1]/div[2]/div[1]/div[2]/div[1]/div[1]/main[1]/div[2]/div[2]/form[1]/section[1]/div[1]/div[1]/div[1]/div[1]/section[1]/div[2]/div[1]/section[1]/section[1]/div[3]/label[1]/div[2]/div[1]/div[1]/input[1]\") for i in range(len(json['address'])):" ]
[ "TummyTime) class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = ('id', 'username') class", "= ChildSerializer() class Meta: model = DiaperChange fields = ('child', 'time', 'wet', 'solid',", "Meta: model = User fields = ('id', 'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model", "model = Child fields = ('first_name', 'last_name', 'birth_date', 'slug') lookup_field = 'slug' class", "Timer, TummyTime) class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = ('id', 'username')", "class Meta: model = Timer fields = ('name', 'start', 'end', 'duration', 'active', 'user')", "Sleep fields = ('child', 'start', 'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer() class", "'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Note fields", "ChildSerializer() class Meta: model = Note fields = ('child', 'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer):", "(Child, DiaperChange, Feeding, Note, Sleep, Timer, TummyTime) class UserSerializer(serializers.ModelSerializer): class Meta: model =", "from rest_framework import serializers from django.contrib.auth.models import User from core.models import (Child, DiaperChange,", "class SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Sleep fields = ('child',", "class Meta: model = DiaperChange fields = ('child', 'time', 'wet', 'solid', 'color') class", "'start', 'end', 'duration', 'type', 'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta:", "class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Child fields = ('first_name', 'last_name', 'birth_date', 'slug')", "django.contrib.auth.models import User from core.models import (Child, 
DiaperChange, Feeding, Note, Sleep, Timer, TummyTime)", "from __future__ import unicode_literals from rest_framework import serializers from django.contrib.auth.models import User from", "= ('child', 'start', 'end', 'duration', 'type', 'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer()", "= ChildSerializer() class Meta: model = Feeding fields = ('child', 'start', 'end', 'duration',", "Sleep, Timer, TummyTime) class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = ('id',", "= Timer fields = ('name', 'start', 'end', 'duration', 'active', 'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child", "'start', 'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer() class Meta: model = Timer", "Feeding, Note, Sleep, Timer, TummyTime) class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields", "import User from core.models import (Child, DiaperChange, Feeding, Note, Sleep, Timer, TummyTime) class", "-*- coding: utf-8 -*- from __future__ import unicode_literals from rest_framework import serializers from", "Meta: model = Child fields = ('first_name', 'last_name', 'birth_date', 'slug') lookup_field = 'slug'", "'end', 'duration', 'type', 'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model", "= User fields = ('id', 'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Child", "= Note fields = ('child', 'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class", "ChildSerializer() class Meta: model = TummyTime fields = ('child', 'start', 'end', 'duration', 'milestone')", "= ('child', 'start', 'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer() class Meta: model", "child = 
ChildSerializer() class Meta: model = Feeding fields = ('child', 'start', 'end',", "class Meta: model = Sleep fields = ('child', 'start', 'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer):", "ChildSerializer() class Meta: model = Feeding fields = ('child', 'start', 'end', 'duration', 'type',", "Meta: model = Timer fields = ('name', 'start', 'end', 'duration', 'active', 'user') class", "fields = ('id', 'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Child fields =", "model = Sleep fields = ('child', 'start', 'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user =", "fields = ('name', 'start', 'end', 'duration', 'active', 'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer()", "'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = TummyTime fields =", "= ('child', 'time', 'wet', 'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta:", "FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Feeding fields = ('child', 'start',", "'time', 'wet', 'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model =", "'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = DiaperChange fields =", "'end', 'duration', 'active', 'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model =", "model = DiaperChange fields = ('child', 'time', 'wet', 'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child", "NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Note fields = 
('child', 'note',", "Note fields = ('child', 'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta:", "= Child fields = ('first_name', 'last_name', 'birth_date', 'slug') lookup_field = 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer):", "Feeding fields = ('child', 'start', 'end', 'duration', 'type', 'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child", "= ('id', 'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Child fields = ('first_name',", "import unicode_literals from rest_framework import serializers from django.contrib.auth.models import User from core.models import", "'duration', 'type', 'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model =", "'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Sleep fields =", "= ('child', 'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model =", "('id', 'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Child fields = ('first_name', 'last_name',", "child = ChildSerializer() class Meta: model = DiaperChange fields = ('child', 'time', 'wet',", "rest_framework import serializers from django.contrib.auth.models import User from core.models import (Child, DiaperChange, Feeding,", "'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Note fields =", "('child', 'start', 'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer() class Meta: model =", "class Meta: model = User fields = ('id', 'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta:", "fields = ('child', 'start', 
'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer() class Meta:", "from django.contrib.auth.models import User from core.models import (Child, DiaperChange, Feeding, Note, Sleep, Timer,", "Meta: model = Feeding fields = ('child', 'start', 'end', 'duration', 'type', 'method', 'amount')", "('child', 'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Sleep", "# -*- coding: utf-8 -*- from __future__ import unicode_literals from rest_framework import serializers", "class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = TummyTime fields = ('child',", "User fields = ('id', 'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Child fields", "DiaperChange, Feeding, Note, Sleep, Timer, TummyTime) class UserSerializer(serializers.ModelSerializer): class Meta: model = User", "child = ChildSerializer() class Meta: model = Note fields = ('child', 'note', 'time')", "= ('name', 'start', 'end', 'duration', 'active', 'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class", "'active', 'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = TummyTime fields", "coding: utf-8 -*- from __future__ import unicode_literals from rest_framework import serializers from django.contrib.auth.models", "class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = DiaperChange fields = ('child',", "UserSerializer() class Meta: model = Timer fields = ('name', 'start', 'end', 'duration', 'active',", "serializers from django.contrib.auth.models import User from core.models import (Child, DiaperChange, Feeding, Note, Sleep,", "Timer fields = ('name', 'start', 'end', 'duration', 'active', 'user') class 
TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child =", "DiaperChange fields = ('child', 'time', 'wet', 'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer()", "<reponame>vault-the/babybuddy<filename>api/serializers.py # -*- coding: utf-8 -*- from __future__ import unicode_literals from rest_framework import", "user = UserSerializer() class Meta: model = Timer fields = ('name', 'start', 'end',", "DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = DiaperChange fields = ('child', 'time',", "= ('first_name', 'last_name', 'birth_date', 'slug') lookup_field = 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer()", "'duration', 'active', 'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = TummyTime", "ChildSerializer() class Meta: model = Sleep fields = ('child', 'start', 'end', 'duration') class", "import serializers from django.contrib.auth.models import User from core.models import (Child, DiaperChange, Feeding, Note,", "class Meta: model = Note fields = ('child', 'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child", "TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer() class Meta: model = Timer fields = ('name', 'start',", "'start', 'end', 'duration', 'active', 'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model", "from core.models import (Child, DiaperChange, Feeding, Note, Sleep, Timer, TummyTime) class UserSerializer(serializers.ModelSerializer): class", "= ChildSerializer() class Meta: model = Sleep fields = ('child', 'start', 'end', 'duration')", "'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Feeding fields", 
"'type', 'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Note", "'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer() class Meta: model = Timer fields", "child = ChildSerializer() class Meta: model = TummyTime fields = ('child', 'start', 'end',", "ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Child fields = ('first_name', 'last_name', 'birth_date', 'slug') lookup_field", "fields = ('first_name', 'last_name', 'birth_date', 'slug') lookup_field = 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child =", "model = Feeding fields = ('child', 'start', 'end', 'duration', 'type', 'method', 'amount') class", "Meta: model = Sleep fields = ('child', 'start', 'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user", "'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Sleep fields", "'wet', 'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Feeding", "= ChildSerializer() class Meta: model = Note fields = ('child', 'note', 'time') class", "class Meta: model = Feeding fields = ('child', 'start', 'end', 'duration', 'type', 'method',", "('child', 'time', 'wet', 'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model", "'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = Child fields = ('first_name', 'last_name', 'birth_date',", "model = Timer fields = ('name', 'start', 'end', 'duration', 'active', 'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer):", "class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = ('id', 'username') class 
ChildSerializer(serializers.HyperlinkedModelSerializer):", "__future__ import unicode_literals from rest_framework import serializers from django.contrib.auth.models import User from core.models", "model = Note fields = ('child', 'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer()", "fields = ('child', 'start', 'end', 'duration', 'type', 'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child =", "'birth_date', 'slug') lookup_field = 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model", "User from core.models import (Child, DiaperChange, Feeding, Note, Sleep, Timer, TummyTime) class UserSerializer(serializers.ModelSerializer):", "TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = TummyTime fields = ('child', 'start',", "= UserSerializer() class Meta: model = Timer fields = ('name', 'start', 'end', 'duration',", "Note, Sleep, Timer, TummyTime) class UserSerializer(serializers.ModelSerializer): class Meta: model = User fields =", "= ChildSerializer() class Meta: model = TummyTime fields = ('child', 'start', 'end', 'duration',", "utf-8 -*- from __future__ import unicode_literals from rest_framework import serializers from django.contrib.auth.models import", "fields = ('child', 'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model", "Child fields = ('first_name', 'last_name', 'birth_date', 'slug') lookup_field = 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child", "UserSerializer(serializers.ModelSerializer): class Meta: model = User fields = ('id', 'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class", "class Meta: model = Child fields = ('first_name', 'last_name', 'birth_date', 'slug') lookup_field =", "class 
TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer() class Meta: model = Timer fields = ('name',", "'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer() class Meta: model = Timer fields =", "Meta: model = Note fields = ('child', 'note', 'time') class SleepSerializer(serializers.HyperlinkedModelSerializer): child =", "= 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = DiaperChange fields", "import (Child, DiaperChange, Feeding, Note, Sleep, Timer, TummyTime) class UserSerializer(serializers.ModelSerializer): class Meta: model", "('name', 'start', 'end', 'duration', 'active', 'user') class TummyTimeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta:", "'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Feeding fields =", "Meta: model = DiaperChange fields = ('child', 'time', 'wet', 'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer):", "model = User fields = ('id', 'username') class ChildSerializer(serializers.HyperlinkedModelSerializer): class Meta: model =", "fields = ('child', 'time', 'wet', 'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class", "SleepSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Sleep fields = ('child', 'start',", "ChildSerializer() class Meta: model = DiaperChange fields = ('child', 'time', 'wet', 'solid', 'color')", "('first_name', 'last_name', 'birth_date', 'slug') lookup_field = 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class", "'last_name', 'birth_date', 'slug') lookup_field = 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class 
Meta:", "core.models import (Child, DiaperChange, Feeding, Note, Sleep, Timer, TummyTime) class UserSerializer(serializers.ModelSerializer): class Meta:", "unicode_literals from rest_framework import serializers from django.contrib.auth.models import User from core.models import (Child,", "'slug') lookup_field = 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model =", "child = ChildSerializer() class Meta: model = Sleep fields = ('child', 'start', 'end',", "= Feeding fields = ('child', 'start', 'end', 'duration', 'type', 'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer):", "= DiaperChange fields = ('child', 'time', 'wet', 'solid', 'color') class FeedingSerializer(serializers.HyperlinkedModelSerializer): child =", "('child', 'start', 'end', 'duration', 'type', 'method', 'amount') class NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class", "= Sleep fields = ('child', 'start', 'end', 'duration') class TimerSerializer(serializers.HyperlinkedModelSerializer): user = UserSerializer()", "-*- from __future__ import unicode_literals from rest_framework import serializers from django.contrib.auth.models import User", "lookup_field = 'slug' class DiaperChangeSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = DiaperChange", "class NoteSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Note fields = ('child',", "class FeedingSerializer(serializers.HyperlinkedModelSerializer): child = ChildSerializer() class Meta: model = Feeding fields = ('child'," ]
[ "Quantity, Recipe from .lists import UserList, UserListRecipe from .users import UserIngredient, Profile from", ".lists import UserList, UserListRecipe from .users import UserIngredient, Profile from .comments import Comment", "<filename>api/drinks/models/__init__.py from .activities import Activity from .books import Book, BookUser from .recipes import", "UserIngredient, Profile from .comments import Comment from .ingredients import Ingredient from .uom import", ".comments import Comment from .ingredients import Ingredient from .uom import Uom from .tags", ".activities import Activity from .books import Book, BookUser from .recipes import Quantity, Recipe", "Book, BookUser from .recipes import Quantity, Recipe from .lists import UserList, UserListRecipe from", "import Activity from .books import Book, BookUser from .recipes import Quantity, Recipe from", "from .books import Book, BookUser from .recipes import Quantity, Recipe from .lists import", "BookUser from .recipes import Quantity, Recipe from .lists import UserList, UserListRecipe from .users", "UserList, UserListRecipe from .users import UserIngredient, Profile from .comments import Comment from .ingredients", "Profile from .comments import Comment from .ingredients import Ingredient from .uom import Uom", ".books import Book, BookUser from .recipes import Quantity, Recipe from .lists import UserList,", "import Quantity, Recipe from .lists import UserList, UserListRecipe from .users import UserIngredient, Profile", "from .users import UserIngredient, Profile from .comments import Comment from .ingredients import Ingredient", "UserListRecipe from .users import UserIngredient, Profile from .comments import Comment from .ingredients import", "Activity from .books import Book, BookUser from .recipes import Quantity, Recipe from .lists", "import UserIngredient, Profile from .comments import Comment from .ingredients import Ingredient from .uom", "import UserList, UserListRecipe from .users import 
UserIngredient, Profile from .comments import Comment from", "Recipe from .lists import UserList, UserListRecipe from .users import UserIngredient, Profile from .comments", "import Book, BookUser from .recipes import Quantity, Recipe from .lists import UserList, UserListRecipe", "from .activities import Activity from .books import Book, BookUser from .recipes import Quantity,", "from .comments import Comment from .ingredients import Ingredient from .uom import Uom from", "from .lists import UserList, UserListRecipe from .users import UserIngredient, Profile from .comments import", "from .recipes import Quantity, Recipe from .lists import UserList, UserListRecipe from .users import", "Comment from .ingredients import Ingredient from .uom import Uom from .tags import Tag", ".recipes import Quantity, Recipe from .lists import UserList, UserListRecipe from .users import UserIngredient,", "import Comment from .ingredients import Ingredient from .uom import Uom from .tags import", ".users import UserIngredient, Profile from .comments import Comment from .ingredients import Ingredient from" ]
[ "logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def get_log_object_for_prediction(collection_name: str, execution_id :", "is set to True then only it will write the logs. Defaults to", "the yaml file \"\"\" with open(config_path, 'r') as f: return yaml.safe_load(f) def get_log_object_for_training(collection_name:", "project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def read_prediction_schema(): \"\"\"Responsible for reading the schema", "for reading the schema from schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with open(path) as f:", "and then ceate the directory . Defaults to False. \"\"\" if is_recreate: try:", "NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible for reading", "\"\"\"It will give the Log Object for training Args: collection_name (str): Name of", "is_recreate (bool, optional): If True then it will first delete and then ceate", "Defaults to False. 
\"\"\" if is_recreate: try: shutil.rmtree(path) except Exception: pass os.makedirs(path,exist_ok=True) #", "='config/params.yaml')->dict: \"\"\"Responsible for reading the yaml file Args: config_path (str): Path of the", "executed_by: str=None, project_id :str=None, is_log_enabled : bool=True): \"\"\"It will give the Log Object", "= schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible", "os.makedirs(path,exist_ok=True) # It will not through error if the folder already exists def", "the logs. Defaults to True. Returns: Logger: Logger Object \"\"\" params=read_params() if execution_id==None:", "will first delete and then ceate the directory . Defaults to False. \"\"\"", "collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def get_log_object_for_prediction(collection_name: str, execution_id : str=None, executed_by: str=None, project_id", "optional): Id of the project. Defaults to None. is_log_enabled (bool, optional): If it", "only it will write the logs. Defaults to True. Returns: Logger: Logger Object", "stored execution_id (str, optional): Execution id. Defaults to None. 
executed_by (str, optional): Executed", "asyncore import read import os import shutil import yaml import json from app_logger", "import json from app_logger import logger from datetime import datetime import uuid def", "\"\"\"It will give the Log Object for prediction Args: collection_name (str): Name of", "Give the full path with directory name is_recreate (bool, optional): If True then", "\"\"\"Responsible for reading the schema from schema_training.json \"\"\" params=read_params() path = params['data_schemas']['training_schema'] with", "params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def get_log_object_for_prediction(collection_name:", "ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible for reading the schema from", "ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns the current date. \"\"\" return", "LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date():", "path=params['data_schemas']['prediction_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns", "None. is_log_enabled (bool, optional): If it is set to True then only it", "LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return", "date. 
\"\"\" return datetime.now().date().strftime('%d-%m-%y') def get_time(): \"\"\"Returns the current time \"\"\" return datetime.now().time().strftime('%H-%M-%S')", "path = params['data_schemas']['training_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile =", "to None. is_log_enabled (bool, optional): If it is set to True then only", "bool=True): \"\"\"It will give the Log Object for training Args: collection_name (str): Name", "optional): If it is set to True then only it will write the", "ceate the directory . Defaults to False. \"\"\" if is_recreate: try: shutil.rmtree(path) except", "Exception: pass os.makedirs(path,exist_ok=True) # It will not through error if the folder already", "Yaml file . Defaults to 'config/params.yaml' Returns: dict: Return the details of the", "folder already exists def read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible for reading the yaml file", "NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns the current", "the directory . Defaults to False. 
\"\"\" if is_recreate: try: shutil.rmtree(path) except Exception:", "read_prediction_schema(): \"\"\"Responsible for reading the schema from schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with open(path)", "return yaml.safe_load(f) def get_log_object_for_training(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled", "of the yaml file \"\"\" with open(config_path, 'r') as f: return yaml.safe_load(f) def", "import datetime import uuid def create_directory(path: str, is_recreate: bool = False)->None: \"\"\"Utility to", "schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible for reading the schema", "= logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def get_log_object_for_prediction(collection_name: str, execution_id", "Path of the Yaml file . 
Defaults to 'config/params.yaml' Returns: dict: Return the", "through error if the folder already exists def read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible for", "with directory name is_recreate (bool, optional): If True then it will first delete", "logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def read_prediction_schema(): \"\"\"Responsible for reading", "from schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile']", "\"\"\"Responsible for reading the schema from schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with open(path) as", "be stored execution_id (str, optional): Execution id. Defaults to None. 
executed_by (str, optional):", "is_log_enabled=is_log_enabled) return logger_obj def get_log_object_for_prediction(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None,", "Log Object for training Args: collection_name (str): Name of the collection in which", ": bool=True): \"\"\"It will give the Log Object for prediction Args: collection_name (str):", "import yaml import json from app_logger import logger from datetime import datetime import", "params['data_schemas']['training_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns", "open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns']", "the full path with directory name is_recreate (bool, optional): If True then it", "the dirctory Args: path (str): Give the full path with directory name is_recreate", "os import shutil import yaml import json from app_logger import logger from datetime", "\"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile =", "(str): Name of the collection in which the log will be stored execution_id", "full path with directory name is_recreate (bool, optional): If True then it will", "read_training_schema(): \"\"\"Responsible for reading the schema from schema_training.json \"\"\" params=read_params() path = params['data_schemas']['training_schema']", "collection in which the log will be stored execution_id (str, optional): Execution id.", "reading the schema from schema_training.json \"\"\" params=read_params() path = params['data_schemas']['training_schema'] with open(path) as", "Args: config_path (str): Path of the Yaml file . 
Defaults to 'config/params.yaml' Returns:", "= schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible for reading the schema from schema_training.json", "project_id :str=None, is_log_enabled : bool=True): \"\"\"It will give the Log Object for prediction", "app_logger import logger from datetime import datetime import uuid def create_directory(path: str, is_recreate:", "Object for prediction Args: collection_name (str): Name of the collection in which the", "schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName']", "execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by,", "to True then only it will write the logs. Defaults to True. Returns:", "logger_obj def get_log_object_for_prediction(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled :", "id. Defaults to None. executed_by (str, optional): Executed by. Defaults to None. project_id", "try: shutil.rmtree(path) except Exception: pass os.makedirs(path,exist_ok=True) # It will not through error if", "Execution id. Defaults to None. executed_by (str, optional): Executed by. 
Defaults to None.", "return logger_obj def get_log_object_for_prediction(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled", "path with directory name is_recreate (bool, optional): If True then it will first", "return logger_obj def read_prediction_schema(): \"\"\"Responsible for reading the schema from schema_prediction.json \"\"\" params=read_params()", ":str=None, is_log_enabled : bool=True): \"\"\"It will give the Log Object for prediction Args:", "= params['data_schemas']['training_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile']", "error if the folder already exists def read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible for reading", "True then it will first delete and then ceate the directory . Defaults", "shutil import yaml import json from app_logger import logger from datetime import datetime", "log will be stored execution_id (str, optional): Execution id. Defaults to None. executed_by", "uuid def create_directory(path: str, is_recreate: bool = False)->None: \"\"\"Utility to create the dirctory", "executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'],", "get_log_object_for_training(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled : bool=True): \"\"\"It", "current date. \"\"\" return datetime.now().date().strftime('%d-%m-%y') def get_time(): \"\"\"Returns the current time \"\"\" return", "(str, optional): Execution id. Defaults to None. executed_by (str, optional): Executed by. 
Defaults", "the schema from schema_training.json \"\"\" params=read_params() path = params['data_schemas']['training_schema'] with open(path) as f:", "optional): Executed by. Defaults to None. project_id (str, optional): Id of the project.", "False. \"\"\" if is_recreate: try: shutil.rmtree(path) except Exception: pass os.makedirs(path,exist_ok=True) # It will", "def create_directory(path: str, is_recreate: bool = False)->None: \"\"\"Utility to create the dirctory Args:", "params=read_params() path=params['data_schemas']['prediction_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile']", "executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name,", "from app_logger import logger from datetime import datetime import uuid def create_directory(path: str,", "optional): If True then it will first delete and then ceate the directory", "to True. Returns: Logger: Logger Object \"\"\" params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None:", "return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible for reading the schema from schema_training.json \"\"\" params=read_params()", "shutil.rmtree(path) except Exception: pass os.makedirs(path,exist_ok=True) # It will not through error if the", "Name of the collection in which the log will be stored execution_id (str,", "is_log_enabled : bool=True): \"\"\"It will give the Log Object for prediction Args: collection_name", "Defaults to None. executed_by (str, optional): Executed by. Defaults to None. 
project_id (str,", "Defaults to 'config/params.yaml' Returns: dict: Return the details of the yaml file \"\"\"", "yaml file \"\"\" with open(config_path, 'r') as f: return yaml.safe_load(f) def get_log_object_for_training(collection_name: str,", "get_log_object_for_prediction(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled : bool=True): \"\"\"It", "the collection in which the log will be stored execution_id (str, optional): Execution", "project_id :str=None, is_log_enabled : bool=True): \"\"\"It will give the Log Object for training", "to None. project_id (str, optional): Id of the project. Defaults to None. is_log_enabled", "give the Log Object for prediction Args: collection_name (str): Name of the collection", "'r') as f: return yaml.safe_load(f) def get_log_object_for_training(collection_name: str, execution_id : str=None, executed_by: str=None,", "will give the Log Object for prediction Args: collection_name (str): Name of the", "except Exception: pass os.makedirs(path,exist_ok=True) # It will not through error if the folder", "if is_recreate: try: shutil.rmtree(path) except Exception: pass os.makedirs(path,exist_ok=True) # It will not through", "the yaml file Args: config_path (str): Path of the Yaml file . Defaults", "schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns the current date. \"\"\"", "= False)->None: \"\"\"Utility to create the dirctory Args: path (str): Give the full", "= schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns the current date.", "(str): Path of the Yaml file . 
Defaults to 'config/params.yaml' Returns: dict: Return", "will not through error if the folder already exists def read_params(config_path: str ='config/params.yaml')->dict:", "(bool, optional): If it is set to True then only it will write", "schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns the current date. \"\"\" return datetime.now().date().strftime('%d-%m-%y') def", "which the log will be stored execution_id (str, optional): Execution id. Defaults to", "If it is set to True then only it will write the logs.", "Log Object for prediction Args: collection_name (str): Name of the collection in which", "training Args: collection_name (str): Name of the collection in which the log will", "schema_training.json \"\"\" params=read_params() path = params['data_schemas']['training_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile =", "from schema_training.json \"\"\" params=read_params() path = params['data_schemas']['training_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile", "logger_obj def read_prediction_schema(): \"\"\"Responsible for reading the schema from schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema']", "set to True then only it will write the logs. Defaults to True.", "create the dirctory Args: path (str): Give the full path with directory name", "for training Args: collection_name (str): Name of the collection in which the log", "delete and then ceate the directory . Defaults to False. \"\"\" if is_recreate:", "then ceate the directory . Defaults to False. 
\"\"\" if is_recreate: try: shutil.rmtree(path)", "reading the yaml file Args: config_path (str): Path of the Yaml file .", "project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj", "executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def read_prediction_schema(): \"\"\"Responsible for reading the", "to False. \"\"\" if is_recreate: try: shutil.rmtree(path) except Exception: pass os.makedirs(path,exist_ok=True) # It", "\"\"\"Utility to create the dirctory Args: path (str): Give the full path with", "executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'],", "str ='config/params.yaml')->dict: \"\"\"Responsible for reading the yaml file Args: config_path (str): Path of", "(str): Give the full path with directory name is_recreate (bool, optional): If True", "= params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def", "exists def read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible for reading the yaml file Args: config_path", "f: return yaml.safe_load(f) def get_log_object_for_training(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None,", "file . 
Defaults to 'config/params.yaml' Returns: dict: Return the details of the yaml", "Defaults to True. Returns: Logger: Logger Object \"\"\" params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex if", "for prediction Args: collection_name (str): Name of the collection in which the log", "as f: return yaml.safe_load(f) def get_log_object_for_training(collection_name: str, execution_id : str=None, executed_by: str=None, project_id", "config_path (str): Path of the Yaml file . Defaults to 'config/params.yaml' Returns: dict:", "LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema():", "Object for training Args: collection_name (str): Name of the collection in which the", "schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible for reading the schema from schema_training.json \"\"\"", "get_date(): \"\"\"Returns the current date. \"\"\" return datetime.now().date().strftime('%d-%m-%y') def get_time(): \"\"\"Returns the current", "LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns the current date. 
\"\"\" return datetime.now().date().strftime('%d-%m-%y') def get_time(): \"\"\"Returns", "to create the dirctory Args: path (str): Give the full path with directory", "databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def get_log_object_for_prediction(collection_name: str, execution_id : str=None, executed_by: str=None,", "= logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def read_prediction_schema(): \"\"\"Responsible for", "= schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName", "the schema from schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile", "(bool, optional): If True then it will first delete and then ceate the", "def get_log_object_for_training(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled : bool=True):", "will be stored execution_id (str, optional): Execution id. Defaults to None. 
executed_by (str,", "prediction Args: collection_name (str): Name of the collection in which the log will", "\"\"\"Responsible for reading the yaml file Args: config_path (str): Path of the Yaml", "import read import os import shutil import yaml import json from app_logger import", "logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def read_prediction_schema(): \"\"\"Responsible", "if the folder already exists def read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible for reading the", "import os import shutil import yaml import json from app_logger import logger from", "pass os.makedirs(path,exist_ok=True) # It will not through error if the folder already exists", "None. executed_by (str, optional): Executed by. Defaults to None. project_id (str, optional): Id", "params=read_params() path = params['data_schemas']['training_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile", "Returns: Logger: Logger Object \"\"\" params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author'] if", "project. Defaults to None. 
is_log_enabled (bool, optional): If it is set to True", "str=None, project_id :str=None, is_log_enabled : bool=True): \"\"\"It will give the Log Object for", "the Log Object for prediction Args: collection_name (str): Name of the collection in", "yaml.safe_load(f) def get_log_object_for_training(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled :", "is_log_enabled=is_log_enabled) return logger_obj def read_prediction_schema(): \"\"\"Responsible for reading the schema from schema_prediction.json \"\"\"", "True then only it will write the logs. Defaults to True. Returns: Logger:", "is_recreate: try: shutil.rmtree(path) except Exception: pass os.makedirs(path,exist_ok=True) # It will not through error", "the current date. \"\"\" return datetime.now().date().strftime('%d-%m-%y') def get_time(): \"\"\"Returns the current time \"\"\"", "of the collection in which the log will be stored execution_id (str, optional):", "(str, optional): Executed by. Defaults to None. project_id (str, optional): Id of the", "logger from datetime import datetime import uuid def create_directory(path: str, is_recreate: bool =", "# It will not through error if the folder already exists def read_params(config_path:", ": bool=True): \"\"\"It will give the Log Object for training Args: collection_name (str):", "write the logs. Defaults to True. Returns: Logger: Logger Object \"\"\" params=read_params() if", ". Defaults to False. \"\"\" if is_recreate: try: shutil.rmtree(path) except Exception: pass os.makedirs(path,exist_ok=True)", "by. Defaults to None. project_id (str, optional): Id of the project. 
Defaults to", "datetime import uuid def create_directory(path: str, is_recreate: bool = False)->None: \"\"\"Utility to create", "Object \"\"\" params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id =", "from asyncore import read import os import shutil import yaml import json from", "json from app_logger import logger from datetime import datetime import uuid def create_directory(path:", "project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return", "f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName =", "dict: Return the details of the yaml file \"\"\" with open(config_path, 'r') as", "it will write the logs. Defaults to True. Returns: Logger: Logger Object \"\"\"", "is_log_enabled : bool=True): \"\"\"It will give the Log Object for training Args: collection_name", "yaml file Args: config_path (str): Path of the Yaml file . 
Defaults to", "if executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id,", "Returns: dict: Return the details of the yaml file \"\"\" with open(config_path, 'r')", "executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def get_log_object_for_prediction(collection_name: str, execution_id : str=None,", ": str=None, executed_by: str=None, project_id :str=None, is_log_enabled : bool=True): \"\"\"It will give the", "def read_training_schema(): \"\"\"Responsible for reading the schema from schema_training.json \"\"\" params=read_params() path =", "Logger: Logger Object \"\"\" params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author'] if project_id==None:", "it will first delete and then ceate the directory . Defaults to False.", "the folder already exists def read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible for reading the yaml", "LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible for reading the schema from schema_training.json \"\"\" params=read_params() path", "\"\"\"Returns the current date. \"\"\" return datetime.now().date().strftime('%d-%m-%y') def get_time(): \"\"\"Returns the current time", "(str, optional): Id of the project. Defaults to None. 
is_log_enabled (bool, optional): If", "str, is_recreate: bool = False)->None: \"\"\"Utility to create the dirctory Args: path (str):", "schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def", "file \"\"\" with open(config_path, 'r') as f: return yaml.safe_load(f) def get_log_object_for_training(collection_name: str, execution_id", "then only it will write the logs. Defaults to True. Returns: Logger: Logger", "bool=True): \"\"\"It will give the Log Object for prediction Args: collection_name (str): Name", "is_log_enabled (bool, optional): If it is set to True then only it will", "project_id (str, optional): Id of the project. Defaults to None. is_log_enabled (bool, optional):", "open(config_path, 'r') as f: return yaml.safe_load(f) def get_log_object_for_training(collection_name: str, execution_id : str=None, executed_by:", "Defaults to None. 
is_log_enabled (bool, optional): If it is set to True then", "the details of the yaml file \"\"\" with open(config_path, 'r') as f: return", "params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def read_prediction_schema():", "give the Log Object for training Args: collection_name (str): Name of the collection", "collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def read_prediction_schema(): \"\"\"Responsible for reading the schema from schema_prediction.json", "execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled : bool=True): \"\"\"It will give", "schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns the", "import logger from datetime import datetime import uuid def create_directory(path: str, is_recreate: bool", "import uuid def create_directory(path: str, is_recreate: bool = False)->None: \"\"\"Utility to create the", "schema from schema_training.json \"\"\" params=read_params() path = params['data_schemas']['training_schema'] with open(path) as f: schema=json.load(f)", "def read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible for reading the yaml file Args: config_path (str):", "yaml import json from app_logger import logger from datetime import datetime import uuid", "executed_by (str, optional): Executed by. Defaults to None. project_id (str, optional): Id of", "Logger Object \"\"\" params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id", ". 
Defaults to 'config/params.yaml' Returns: dict: Return the details of the yaml file", "bool = False)->None: \"\"\"Utility to create the dirctory Args: path (str): Give the", "'config/params.yaml' Returns: dict: Return the details of the yaml file \"\"\" with open(config_path,", "will give the Log Object for training Args: collection_name (str): Name of the", "the Yaml file . Defaults to 'config/params.yaml' Returns: dict: Return the details of", "return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns the current date. \"\"\" return datetime.now().date().strftime('%d-%m-%y') def get_time():", "first delete and then ceate the directory . Defaults to False. \"\"\" if", "the Log Object for training Args: collection_name (str): Name of the collection in", "for reading the schema from schema_training.json \"\"\" params=read_params() path = params['data_schemas']['training_schema'] with open(path)", "not through error if the folder already exists def read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible", "Return the details of the yaml file \"\"\" with open(config_path, 'r') as f:", "\"\"\" params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id']", "project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def get_log_object_for_prediction(collection_name: str, execution_id : str=None, executed_by:", "\"\"\" with open(config_path, 'r') as f: return yaml.safe_load(f) def get_log_object_for_training(collection_name: str, execution_id :", "schema from schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile =", "the project. 
Defaults to None. is_log_enabled (bool, optional): If it is set to", "optional): Execution id. Defaults to None. executed_by (str, optional): Executed by. Defaults to", "dirctory Args: path (str): Give the full path with directory name is_recreate (bool,", "of the project. Defaults to None. is_log_enabled (bool, optional): If it is set", "directory name is_recreate (bool, optional): If True then it will first delete and", "import shutil import yaml import json from app_logger import logger from datetime import", "will write the logs. Defaults to True. Returns: Logger: Logger Object \"\"\" params=read_params()", "directory . Defaults to False. \"\"\" if is_recreate: try: shutil.rmtree(path) except Exception: pass", "with open(config_path, 'r') as f: return yaml.safe_load(f) def get_log_object_for_training(collection_name: str, execution_id : str=None,", "False)->None: \"\"\"Utility to create the dirctory Args: path (str): Give the full path", "Executed by. Defaults to None. project_id (str, optional): Id of the project. Defaults", "executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name,", "str, execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled : bool=True): \"\"\"It will", "str=None, executed_by: str=None, project_id :str=None, is_log_enabled : bool=True): \"\"\"It will give the Log", "def get_date(): \"\"\"Returns the current date. \"\"\" return datetime.now().date().strftime('%d-%m-%y') def get_time(): \"\"\"Returns the", "path (str): Give the full path with directory name is_recreate (bool, optional): If", "details of the yaml file \"\"\" with open(config_path, 'r') as f: return yaml.safe_load(f)", "Defaults to None. project_id (str, optional): Id of the project. 
Defaults to None.", "\"\"\" params=read_params() path = params['data_schemas']['training_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile']", "schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile", "execution_id (str, optional): Execution id. Defaults to None. executed_by (str, optional): Executed by.", "if project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled)", "as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName", "= schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns the current date. \"\"\" return datetime.now().date().strftime('%d-%m-%y')", "True. 
Returns: Logger: Logger Object \"\"\" params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author']", "is_recreate: bool = False)->None: \"\"\"Utility to create the dirctory Args: path (str): Give", "if project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled)", "= schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible for reading the", "collection_name (str): Name of the collection in which the log will be stored", "to 'config/params.yaml' Returns: dict: Return the details of the yaml file \"\"\" with", "the log will be stored execution_id (str, optional): Execution id. Defaults to None.", "to None. executed_by (str, optional): Executed by. Defaults to None. project_id (str, optional):", "execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id,", "def read_prediction_schema(): \"\"\"Responsible for reading the schema from schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with", "= schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def get_date(): \"\"\"Returns", "If True then it will first delete and then ceate the directory .", "of the Yaml file . 
Defaults to 'config/params.yaml' Returns: dict: Return the details", "def get_log_object_for_prediction(collection_name: str, execution_id : str=None, executed_by: str=None, project_id :str=None, is_log_enabled : bool=True):", "read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible for reading the yaml file Args: config_path (str): Path", "file Args: config_path (str): Path of the Yaml file . Defaults to 'config/params.yaml'", "it is set to True then only it will write the logs. Defaults", "if execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id'] logger_obj =", "schema['LengthOfTimeStampInFile'] NumberofColumns = schema['NumberofColumns'] ColName = schema['ColName'] return LengthOfDateStampInFile,LengthOfTimeStampInFile,NumberofColumns,ColName def read_training_schema(): \"\"\"Responsible for", "already exists def read_params(config_path: str ='config/params.yaml')->dict: \"\"\"Responsible for reading the yaml file Args:", "read import os import shutil import yaml import json from app_logger import logger", "Args: collection_name (str): Name of the collection in which the log will be", "logs. Defaults to True. Returns: Logger: Logger Object \"\"\" params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex", "in which the log will be stored execution_id (str, optional): Execution id. 
Defaults", "= params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def", "logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['training_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def get_log_object_for_prediction(collection_name: str,", "\"\"\" if is_recreate: try: shutil.rmtree(path) except Exception: pass os.makedirs(path,exist_ok=True) # It will not", "project_id==None: project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return", "name is_recreate (bool, optional): If True then it will first delete and then", "None. project_id (str, optional): Id of the project. Defaults to None. is_log_enabled (bool,", "reading the schema from schema_prediction.json \"\"\" params=read_params() path=params['data_schemas']['prediction_schema'] with open(path) as f: schema=json.load(f)", "from datetime import datetime import uuid def create_directory(path: str, is_recreate: bool = False)->None:", "create_directory(path: str, is_recreate: bool = False)->None: \"\"\"Utility to create the dirctory Args: path", "Id of the project. Defaults to None. is_log_enabled (bool, optional): If it is", "Args: path (str): Give the full path with directory name is_recreate (bool, optional):", "then it will first delete and then ceate the directory . 
Defaults to", "with open(path) as f: schema=json.load(f) LengthOfDateStampInFile = schema['LengthOfDateStampInFile'] LengthOfTimeStampInFile = schema['LengthOfTimeStampInFile'] NumberofColumns =", "datetime import datetime import uuid def create_directory(path: str, is_recreate: bool = False)->None: \"\"\"Utility", "databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj def read_prediction_schema(): \"\"\"Responsible for reading the schema from", "params=read_params() if execution_id==None: execution_id=uuid.uuid4().hex if executed_by==None: executed_by=params['base']['author'] if project_id==None: project_id = params['base']['project_id'] logger_obj", "project_id = params['base']['project_id'] logger_obj = logger.Logger(execution_id=execution_id, executed_by=executed_by, project_id=project_id, databasename=params['database_logs']['prediction_logs']['database_name'], collection_name=collection_name, is_log_enabled=is_log_enabled) return logger_obj", "for reading the yaml file Args: config_path (str): Path of the Yaml file", ":str=None, is_log_enabled : bool=True): \"\"\"It will give the Log Object for training Args:", "It will not through error if the folder already exists def read_params(config_path: str" ]
[ "overhead in the \"bridge\" protocol used by pyjulia. The idea of using naively", "current index, at time t and after :math:`N_k(t)` pulls of arm k: ..", "there is a lot of overhead in the \"bridge\" protocol used by pyjulia.", "e: print(\"Error: unable to load the 'julia' Python module. Install with 'pip install", "the current index, at time t and after :math:`N_k(t)` pulls of arm k:", "IndexPolicy class UCBjulia(IndexPolicy): \"\"\" The UCB policy for bounded bandits, with UCB indexes", "& Robbins, 1985]. .. warning:: This is only experimental, and purely useless. See", "try: import julia except ImportError as e: print(\"Error: unable to load the 'julia'", "this policy from sys import path from os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try:", "raise e _j = julia.Julia() try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try: self._index_function", "from 'UCBjulia.jl' is bugged or unavailable.\") # WARNING def computeIndex(self, arm): r\"\"\" Compute", "A naive benchmark showed that in this approach, :class:`UCBjulia` (used withing Python) is", "self.t = 0 # Importing the julia module and creating the bridge try:", "experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self, nbArms, lower=0., amplitude=1.): \"\"\"", "# for Python and the 1-based indexes in Julia. 
The rest works pretty", "arm): r\"\"\" Compute the current index, at time t and after :math:`N_k(t)` pulls", "path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy import IndexPolicy except ImportError: from IndexPolicy import IndexPolicy", "Python 2 compatibility __author__ = \"<NAME>\" __version__ = \"0.9\" # WARNING: this is", "import julia except ImportError as e: print(\"Error: unable to load the 'julia' Python", "except (RuntimeError, ValueError): raise ValueError(\"Error: the index function loaded from 'UCBjulia.jl' is bugged", "__future__ import division, print_function # Python 2 compatibility __author__ = \"<NAME>\" __version__ =", "indexes computed with Julia. Reference: [Lai & Robbins, 1985]. .. warning:: This is", "Julia function *from* Python will not speed up anything, as there is a", "= \"<NAME>\" __version__ = \"0.9\" # WARNING: this is a HUGE hack to", "_j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise ValueError(\"Error: Unable to load 'UCBjulia.jl' julia file.\") # WARNING", "pulls of arm k: .. math:: I_k(t) = \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\"", "'UCBjulia.jl' julia file.\") # WARNING try: self._index_function([1], [1], 1, 1) except (RuntimeError, ValueError):", "difference between 0-based indexes # for Python and the 1-based indexes in Julia.", "'pip install julia', or see https://github.com/JuliaPy/pyjulia/\") # DEBUG raise e _j = julia.Julia()", "__author__ = \"<NAME>\" __version__ = \"0.9\" # WARNING: this is a HUGE hack", "HUGE hack to fix a mystery bug on importing this policy from sys", "by pyjulia. The idea of using naively a tiny Julia function to speed", "try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try: self._index_function = _j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise", "is only experimental, and purely useless. 
See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__ import division,", "(used withing Python) is about 125 times slower (!) than :class:`UCB`. .. warning::", "tiny Julia function to speed up computations is basically useless. A naive benchmark", "print_function # Python 2 compatibility __author__ = \"<NAME>\" __version__ = \"0.9\" # WARNING:", "import path from os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy import IndexPolicy", "from __future__ import division, print_function # Python 2 compatibility __author__ = \"<NAME>\" __version__", "\"bridge\" protocol used by pyjulia. The idea of using naively a tiny Julia", "Python will not speed up anything, as there is a lot of overhead", "from os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy import IndexPolicy except ImportError:", "import IndexPolicy except ImportError: from IndexPolicy import IndexPolicy class UCBjulia(IndexPolicy): \"\"\" The UCB", "\\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\" # WARNING: the 'arm + 1' part comes", "warning:: This is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self,", "bandits, with UCB indexes computed with Julia. Reference: [Lai & Robbins, 1985]. ..", "slower (!) than :class:`UCB`. .. warning:: This is only experimental, and purely useless.", "# WARNING try: self._index_function([1], [1], 1, 1) except (RuntimeError, ValueError): raise ValueError(\"Error: the", "except RuntimeError: try: self._index_function = _j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise ValueError(\"Error: Unable to load", "r\"\"\" Compute the current index, at time t and after :math:`N_k(t)` pulls of", "try: self._index_function = _j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise ValueError(\"Error: Unable to load 'UCBjulia.jl' julia", "basically useless. 
A naive benchmark showed that in this approach, :class:`UCBjulia` (used withing", "on importing this policy from sys import path from os.path import dirname path.insert(0,", "to load the 'julia' Python module. Install with 'pip install julia', or see", "self._index_function([1], [1], 1, 1) except (RuntimeError, ValueError): raise ValueError(\"Error: the index function loaded", "purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self, nbArms, lower=0., amplitude=1.): \"\"\" Will fail", "useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__ import division, print_function # Python 2 compatibility", "ValueError(\"Error: the index function loaded from 'UCBjulia.jl' is bugged or unavailable.\") # WARNING", "'arm + 1' part comes from the difference between 0-based indexes # for", "bounded bandits, with UCB indexes computed with Julia. Reference: [Lai & Robbins, 1985].", "UCB policy for bounded bandits, with UCB indexes computed with Julia. Reference: [Lai", "\\log(t)}{N_k(t)}}. \"\"\" # WARNING: the 'arm + 1' part comes from the difference", "UCB indexes computed with Julia. Reference: [Lai & Robbins, 1985]. .. warning:: Using", "1-based indexes in Julia. The rest works pretty well! return self._index_function(self.rewards, self.pulls, self.t,", "warning:: This is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__", "load 'UCBjulia.jl' julia file.\") # WARNING try: self._index_function([1], [1], 1, 1) except (RuntimeError,", "# WARNING: the 'arm + 1' part comes from the difference between 0-based", "def computeIndex(self, arm): r\"\"\" Compute the current index, at time t and after", "[Lai & Robbins, 1985]. .. warning:: This is only experimental, and purely useless.", "& Robbins, 1985]. .. warning:: Using a Julia function *from* Python will not", "1985]. .. 
warning:: Using a Julia function *from* Python will not speed up", "and the 1-based indexes in Julia. The rest works pretty well! return self._index_function(self.rewards,", "computations is basically useless. A naive benchmark showed that in this approach, :class:`UCBjulia`", "of arm k: .. math:: I_k(t) = \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\" #", "function loaded from 'UCBjulia.jl' is bugged or unavailable.\") # WARNING def computeIndex(self, arm):", "0 # Importing the julia module and creating the bridge try: import julia", "lot of overhead in the \"bridge\" protocol used by pyjulia. The idea of", "https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__ import division, print_function # Python 2 compatibility __author__ =", "Julia. Reference: [Lai & Robbins, 1985]. .. warning:: Using a Julia function *from*", "amplitude=amplitude) self.t = 0 # Importing the julia module and creating the bridge", "\"\"\" Will fail directly if the bridge with julia is unavailable or buggy.\"\"\"", "naively a tiny Julia function to speed up computations is basically useless. A", "Julia. Reference: [Lai & Robbins, 1985]. .. warning:: This is only experimental, and", "RuntimeError: raise ValueError(\"Error: Unable to load 'UCBjulia.jl' julia file.\") # WARNING try: self._index_function([1],", "used by pyjulia. The idea of using naively a tiny Julia function to", "nbArms, lower=0., amplitude=1.): \"\"\" Will fail directly if the bridge with julia is", "lower=lower, amplitude=amplitude) self.t = 0 # Importing the julia module and creating the", "to fix a mystery bug on importing this policy from sys import path", "Python) is about 125 times slower (!) than :class:`UCB`. .. 
warning:: This is", "# WARNING def computeIndex(self, arm): r\"\"\" Compute the current index, at time t", "if the bridge with julia is unavailable or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude)", "a Julia function *from* Python will not speed up anything, as there is", "1' part comes from the difference between 0-based indexes # for Python and", "at time t and after :math:`N_k(t)` pulls of arm k: .. math:: I_k(t)", "1985]. .. warning:: This is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\"", "import IndexPolicy class UCBjulia(IndexPolicy): \"\"\" The UCB policy for bounded bandits, with UCB", "comes from the difference between 0-based indexes # for Python and the 1-based", "policy from sys import path from os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from", "WARNING def computeIndex(self, arm): r\"\"\" Compute the current index, at time t and", "self._index_function = _j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise ValueError(\"Error: Unable to load 'UCBjulia.jl' julia file.\")", "'UCBjulia.jl' is bugged or unavailable.\") # WARNING def computeIndex(self, arm): r\"\"\" Compute the", "k: .. math:: I_k(t) = \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\" # WARNING: the", "for Python and the 1-based indexes in Julia. The rest works pretty well!", ":class:`UCB`. .. warning:: This is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\"", "and after :math:`N_k(t)` pulls of arm k: .. math:: I_k(t) = \\frac{X_k(t)}{N_k(t)} +", "or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t = 0 # Importing the julia", "than :class:`UCB`. .. warning:: This is only experimental, and purely useless. 
See https://github.com/SMPyBandits/SMPyBandits/issues/98", "is unavailable or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t = 0 # Importing", "computed with Julia. Reference: [Lai & Robbins, 1985]. .. warning:: Using a Julia", "and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self, nbArms, lower=0., amplitude=1.): \"\"\" Will", "# -*- coding: utf-8 -*- \"\"\" The UCB policy for bounded bandits, with", "warning:: Using a Julia function *from* Python will not speed up anything, as", "Unable to load 'UCBjulia.jl' julia file.\") # WARNING try: self._index_function([1], [1], 1, 1)", "Robbins, 1985]. .. warning:: This is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98", "computeIndex(self, arm): r\"\"\" Compute the current index, at time t and after :math:`N_k(t)`", "fix a mystery bug on importing this policy from sys import path from", "UCB indexes computed with Julia. Reference: [Lai & Robbins, 1985]. .. warning:: This", "self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try: self._index_function = _j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise ValueError(\"Error:", "of overhead in the \"bridge\" protocol used by pyjulia. The idea of using", "\"<NAME>\" __version__ = \"0.9\" # WARNING: this is a HUGE hack to fix", "anything, as there is a lot of overhead in the \"bridge\" protocol used", "withing Python) is about 125 times slower (!) than :class:`UCB`. .. warning:: This", "computed with Julia. Reference: [Lai & Robbins, 1985]. .. warning:: This is only", "useless. A naive benchmark showed that in this approach, :class:`UCBjulia` (used withing Python)", "the 'julia' Python module. Install with 'pip install julia', or see https://github.com/JuliaPy/pyjulia/\") #", "time t and after :math:`N_k(t)` pulls of arm k: .. math:: I_k(t) =", "function to speed up computations is basically useless. 
A naive benchmark showed that", "after :math:`N_k(t)` pulls of arm k: .. math:: I_k(t) = \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2", "to load 'UCBjulia.jl' julia file.\") # WARNING try: self._index_function([1], [1], 1, 1) except", "= 0 # Importing the julia module and creating the bridge try: import", "self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t = 0 # Importing the julia module and creating", "load the 'julia' Python module. Install with 'pip install julia', or see https://github.com/JuliaPy/pyjulia/\")", "useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self, nbArms, lower=0., amplitude=1.): \"\"\" Will fail directly", "Python and the 1-based indexes in Julia. The rest works pretty well! return", "\"\"\" # WARNING: the 'arm + 1' part comes from the difference between", "# WARNING: this is a HUGE hack to fix a mystery bug on", "this is a HUGE hack to fix a mystery bug on importing this", "pyjulia. The idea of using naively a tiny Julia function to speed up", "_j = julia.Julia() try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try: self._index_function = _j.evalfile(\"UCBjulia.jl\")", "ValueError): raise ValueError(\"Error: the index function loaded from 'UCBjulia.jl' is bugged or unavailable.\")", "julia.Julia() try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try: self._index_function = _j.evalfile(\"UCBjulia.jl\") except RuntimeError:", "sys import path from os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy import", "is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self, nbArms, lower=0.,", "Python module. Install with 'pip install julia', or see https://github.com/JuliaPy/pyjulia/\") # DEBUG raise", "this approach, :class:`UCBjulia` (used withing Python) is about 125 times slower (!) 
than", "a tiny Julia function to speed up computations is basically useless. A naive", "125 times slower (!) than :class:`UCB`. .. warning:: This is only experimental, and", "only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__ import division, print_function", "compatibility __author__ = \"<NAME>\" __version__ = \"0.9\" # WARNING: this is a HUGE", "unavailable or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t = 0 # Importing the", "e _j = julia.Julia() try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try: self._index_function =", "times slower (!) than :class:`UCB`. .. warning:: This is only experimental, and purely", "ValueError(\"Error: Unable to load 'UCBjulia.jl' julia file.\") # WARNING try: self._index_function([1], [1], 1,", "julia', or see https://github.com/JuliaPy/pyjulia/\") # DEBUG raise e _j = julia.Julia() try: self._index_function", "is a HUGE hack to fix a mystery bug on importing this policy", "+ 1' part comes from the difference between 0-based indexes # for Python", "except ImportError as e: print(\"Error: unable to load the 'julia' Python module. Install", "See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self, nbArms, lower=0., amplitude=1.): \"\"\" Will fail directly if", "from .IndexPolicy import IndexPolicy except ImportError: from IndexPolicy import IndexPolicy class UCBjulia(IndexPolicy): \"\"\"", "or unavailable.\") # WARNING def computeIndex(self, arm): r\"\"\" Compute the current index, at", "print(\"Error: unable to load the 'julia' Python module. Install with 'pip install julia',", "except RuntimeError: raise ValueError(\"Error: Unable to load 'UCBjulia.jl' julia file.\") # WARNING try:", "import division, print_function # Python 2 compatibility __author__ = \"<NAME>\" __version__ = \"0.9\"", "[Lai & Robbins, 1985]. .. 
warning:: Using a Julia function *from* Python will", "approach, :class:`UCBjulia` (used withing Python) is about 125 times slower (!) than :class:`UCB`.", "the index function loaded from 'UCBjulia.jl' is bugged or unavailable.\") # WARNING def", "to speed up computations is basically useless. A naive benchmark showed that in", "creating the bridge try: import julia except ImportError as e: print(\"Error: unable to", "the difference between 0-based indexes # for Python and the 1-based indexes in", "IndexPolicy import IndexPolicy class UCBjulia(IndexPolicy): \"\"\" The UCB policy for bounded bandits, with", "policy for bounded bandits, with UCB indexes computed with Julia. Reference: [Lai &", "Using a Julia function *from* Python will not speed up anything, as there", "2 compatibility __author__ = \"<NAME>\" __version__ = \"0.9\" # WARNING: this is a", "This is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self, nbArms,", "is bugged or unavailable.\") # WARNING def computeIndex(self, arm): r\"\"\" Compute the current", "t and after :math:`N_k(t)` pulls of arm k: .. math:: I_k(t) = \\frac{X_k(t)}{N_k(t)}", "naive benchmark showed that in this approach, :class:`UCBjulia` (used withing Python) is about", "as there is a lot of overhead in the \"bridge\" protocol used by", "protocol used by pyjulia. 
The idea of using naively a tiny Julia function", "bridge try: import julia except ImportError as e: print(\"Error: unable to load the", "try: self._index_function([1], [1], 1, 1) except (RuntimeError, ValueError): raise ValueError(\"Error: the index function", "'/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy import IndexPolicy except ImportError: from IndexPolicy import IndexPolicy class", "benchmark showed that in this approach, :class:`UCBjulia` (used withing Python) is about 125", "showed that in this approach, :class:`UCBjulia` (used withing Python) is about 125 times", "dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy import IndexPolicy except ImportError: from IndexPolicy import", "*from* Python will not speed up anything, as there is a lot of", ".. math:: I_k(t) = \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\" # WARNING: the 'arm", "except ImportError: from IndexPolicy import IndexPolicy class UCBjulia(IndexPolicy): \"\"\" The UCB policy for", "the julia module and creating the bridge try: import julia except ImportError as", "unable to load the 'julia' Python module. Install with 'pip install julia', or", "# Python 2 compatibility __author__ = \"<NAME>\" __version__ = \"0.9\" # WARNING: this", "Importing the julia module and creating the bridge try: import julia except ImportError", "will not speed up anything, as there is a lot of overhead in", "idea of using naively a tiny Julia function to speed up computations is", "directly if the bridge with julia is unavailable or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower,", "raise ValueError(\"Error: the index function loaded from 'UCBjulia.jl' is bugged or unavailable.\") #", "up computations is basically useless. A naive benchmark showed that in this approach,", "UCBjulia(IndexPolicy): \"\"\" The UCB policy for bounded bandits, with UCB indexes computed with", "'julia' Python module. 
Install with 'pip install julia', or see https://github.com/JuliaPy/pyjulia/\") # DEBUG", "Install with 'pip install julia', or see https://github.com/JuliaPy/pyjulia/\") # DEBUG raise e _j", "1) except (RuntimeError, ValueError): raise ValueError(\"Error: the index function loaded from 'UCBjulia.jl' is", "coding: utf-8 -*- \"\"\" The UCB policy for bounded bandits, with UCB indexes", ":class:`UCBjulia` (used withing Python) is about 125 times slower (!) than :class:`UCB`. ..", "def __init__(self, nbArms, lower=0., amplitude=1.): \"\"\" Will fail directly if the bridge with", "unavailable.\") # WARNING def computeIndex(self, arm): r\"\"\" Compute the current index, at time", "\"0.9\" # WARNING: this is a HUGE hack to fix a mystery bug", "mystery bug on importing this policy from sys import path from os.path import", "function *from* Python will not speed up anything, as there is a lot", "try: from .IndexPolicy import IndexPolicy except ImportError: from IndexPolicy import IndexPolicy class UCBjulia(IndexPolicy):", "https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self, nbArms, lower=0., amplitude=1.): \"\"\" Will fail directly if the", "path from os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy import IndexPolicy except", "module and creating the bridge try: import julia except ImportError as e: print(\"Error:", "division, print_function # Python 2 compatibility __author__ = \"<NAME>\" __version__ = \"0.9\" #", "the bridge try: import julia except ImportError as e: print(\"Error: unable to load", "I_k(t) = \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\" # WARNING: the 'arm + 1'", "= _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try: self._index_function = _j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise ValueError(\"Error: Unable", "Robbins, 1985]. .. 
warning:: Using a Julia function *from* Python will not speed", "<reponame>balbok0/SMPyBandits # -*- coding: utf-8 -*- \"\"\" The UCB policy for bounded bandits,", "with Julia. Reference: [Lai & Robbins, 1985]. .. warning:: This is only experimental,", "speed up computations is basically useless. A naive benchmark showed that in this", "index function loaded from 'UCBjulia.jl' is bugged or unavailable.\") # WARNING def computeIndex(self,", "hack to fix a mystery bug on importing this policy from sys import", "bugged or unavailable.\") # WARNING def computeIndex(self, arm): r\"\"\" Compute the current index,", "ImportError: from IndexPolicy import IndexPolicy class UCBjulia(IndexPolicy): \"\"\" The UCB policy for bounded", "in the \"bridge\" protocol used by pyjulia. The idea of using naively a", "See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__ import division, print_function # Python 2 compatibility __author__", "see https://github.com/JuliaPy/pyjulia/\") # DEBUG raise e _j = julia.Julia() try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\")", "and creating the bridge try: import julia except ImportError as e: print(\"Error: unable", "_j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try: self._index_function = _j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise ValueError(\"Error: Unable to", "bridge with julia is unavailable or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t =", "indexes computed with Julia. Reference: [Lai & Robbins, 1985]. .. 
warning:: Using a", ".IndexPolicy import IndexPolicy except ImportError: from IndexPolicy import IndexPolicy class UCBjulia(IndexPolicy): \"\"\" The", "julia is unavailable or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t = 0 #", "buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t = 0 # Importing the julia module", "# Importing the julia module and creating the bridge try: import julia except", "is basically useless. A naive benchmark showed that in this approach, :class:`UCBjulia` (used", "(RuntimeError, ValueError): raise ValueError(\"Error: the index function loaded from 'UCBjulia.jl' is bugged or", "RuntimeError: try: self._index_function = _j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise ValueError(\"Error: Unable to load 'UCBjulia.jl'", "julia except ImportError as e: print(\"Error: unable to load the 'julia' Python module.", "super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t = 0 # Importing the julia module and", "WARNING: the 'arm + 1' part comes from the difference between 0-based indexes", "= \"0.9\" # WARNING: this is a HUGE hack to fix a mystery", "is a lot of overhead in the \"bridge\" protocol used by pyjulia. The", "\"\"\" The UCB policy for bounded bandits, with UCB indexes computed with Julia.", "os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy import IndexPolicy except ImportError: from", "math:: I_k(t) = \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\" # WARNING: the 'arm +", "julia module and creating the bridge try: import julia except ImportError as e:", "= _j.evalfile(\"UCBjulia.jl\") except RuntimeError: raise ValueError(\"Error: Unable to load 'UCBjulia.jl' julia file.\") #", "experimental, and purely useless. 
See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__ import division, print_function #", "and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__ import division, print_function # Python", "the bridge with julia is unavailable or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t", "is about 125 times slower (!) than :class:`UCB`. .. warning:: This is only", "[1], 1, 1) except (RuntimeError, ValueError): raise ValueError(\"Error: the index function loaded from", "from sys import path from os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy", "a lot of overhead in the \"bridge\" protocol used by pyjulia. The idea", "purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__ import division, print_function # Python 2", "-*- \"\"\" The UCB policy for bounded bandits, with UCB indexes computed with", "arm k: .. math:: I_k(t) = \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\" # WARNING:", "ImportError as e: print(\"Error: unable to load the 'julia' Python module. Install with", "# DEBUG raise e _j = julia.Julia() try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError:", "lower=0., amplitude=1.): \"\"\" Will fail directly if the bridge with julia is unavailable", "that in this approach, :class:`UCBjulia` (used withing Python) is about 125 times slower", "in Julia. The rest works pretty well! return self._index_function(self.rewards, self.pulls, self.t, arm +", "speed up anything, as there is a lot of overhead in the \"bridge\"", ".. warning:: Using a Julia function *from* Python will not speed up anything,", "from IndexPolicy import IndexPolicy class UCBjulia(IndexPolicy): \"\"\" The UCB policy for bounded bandits,", "indexes in Julia. The rest works pretty well! 
return self._index_function(self.rewards, self.pulls, self.t, arm", "amplitude=1.): \"\"\" Will fail directly if the bridge with julia is unavailable or", "1, 1) except (RuntimeError, ValueError): raise ValueError(\"Error: the index function loaded from 'UCBjulia.jl'", "with UCB indexes computed with Julia. Reference: [Lai & Robbins, 1985]. .. warning::", "This is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from __future__ import", "__init__(self, nbArms, lower=0., amplitude=1.): \"\"\" Will fail directly if the bridge with julia", "with julia is unavailable or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms, lower=lower, amplitude=amplitude) self.t = 0", "\"\"\" from __future__ import division, print_function # Python 2 compatibility __author__ = \"<NAME>\"", "Julia. The rest works pretty well! return self._index_function(self.rewards, self.pulls, self.t, arm + 1)", "the 'arm + 1' part comes from the difference between 0-based indexes #", "for bounded bandits, with UCB indexes computed with Julia. Reference: [Lai & Robbins,", "The idea of using naively a tiny Julia function to speed up computations", "a mystery bug on importing this policy from sys import path from os.path", "about 125 times slower (!) than :class:`UCB`. .. warning:: This is only experimental,", "of using naively a tiny Julia function to speed up computations is basically", "only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def __init__(self, nbArms, lower=0., amplitude=1.):", "or see https://github.com/JuliaPy/pyjulia/\") # DEBUG raise e _j = julia.Julia() try: self._index_function =", "using naively a tiny Julia function to speed up computations is basically useless.", "from the difference between 0-based indexes # for Python and the 1-based indexes", "with Julia. Reference: [Lai & Robbins, 1985]. .. 
warning:: Using a Julia function", "index, at time t and after :math:`N_k(t)` pulls of arm k: .. math::", "__version__ = \"0.9\" # WARNING: this is a HUGE hack to fix a", "Will fail directly if the bridge with julia is unavailable or buggy.\"\"\" super(UCBjulia,", "part comes from the difference between 0-based indexes # for Python and the", "+ \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\" # WARNING: the 'arm + 1' part comes from", "Reference: [Lai & Robbins, 1985]. .. warning:: This is only experimental, and purely", "import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1])) try: from .IndexPolicy import IndexPolicy except ImportError: from IndexPolicy", "Reference: [Lai & Robbins, 1985]. .. warning:: Using a Julia function *from* Python", "between 0-based indexes # for Python and the 1-based indexes in Julia. The", ".. warning:: This is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" def", "install julia', or see https://github.com/JuliaPy/pyjulia/\") # DEBUG raise e _j = julia.Julia() try:", "IndexPolicy except ImportError: from IndexPolicy import IndexPolicy class UCBjulia(IndexPolicy): \"\"\" The UCB policy", "the \"bridge\" protocol used by pyjulia. The idea of using naively a tiny", ".. warning:: This is only experimental, and purely useless. See https://github.com/SMPyBandits/SMPyBandits/issues/98 \"\"\" from", "https://github.com/JuliaPy/pyjulia/\") # DEBUG raise e _j = julia.Julia() try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except", "up anything, as there is a lot of overhead in the \"bridge\" protocol", "in this approach, :class:`UCBjulia` (used withing Python) is about 125 times slower (!)", "(!) than :class:`UCB`. .. warning:: This is only experimental, and purely useless. See", "indexes # for Python and the 1-based indexes in Julia. The rest works", "\\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. 
\"\"\" # WARNING: the 'arm + 1' part comes from the", "0-based indexes # for Python and the 1-based indexes in Julia. The rest", "module. Install with 'pip install julia', or see https://github.com/JuliaPy/pyjulia/\") # DEBUG raise e", "utf-8 -*- \"\"\" The UCB policy for bounded bandits, with UCB indexes computed", "Compute the current index, at time t and after :math:`N_k(t)` pulls of arm", "= \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}. \"\"\" # WARNING: the 'arm + 1' part", "a HUGE hack to fix a mystery bug on importing this policy from", "file.\") # WARNING try: self._index_function([1], [1], 1, 1) except (RuntimeError, ValueError): raise ValueError(\"Error:", "WARNING try: self._index_function([1], [1], 1, 1) except (RuntimeError, ValueError): raise ValueError(\"Error: the index", "importing this policy from sys import path from os.path import dirname path.insert(0, '/'.join(dirname(__file__).split('/')[:-1]))", "\"\"\" def __init__(self, nbArms, lower=0., amplitude=1.): \"\"\" Will fail directly if the bridge", "julia file.\") # WARNING try: self._index_function([1], [1], 1, 1) except (RuntimeError, ValueError): raise", "fail directly if the bridge with julia is unavailable or buggy.\"\"\" super(UCBjulia, self).__init__(nbArms,", "as e: print(\"Error: unable to load the 'julia' Python module. Install with 'pip", "the 1-based indexes in Julia. The rest works pretty well! return self._index_function(self.rewards, self.pulls,", "bug on importing this policy from sys import path from os.path import dirname", "-*- coding: utf-8 -*- \"\"\" The UCB policy for bounded bandits, with UCB", "= julia.Julia() try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try: self._index_function = _j.evalfile(\"UCBjulia.jl\") except", "loaded from 'UCBjulia.jl' is bugged or unavailable.\") # WARNING def computeIndex(self, arm): r\"\"\"", "Julia function to speed up computations is basically useless. 
A naive benchmark showed", "DEBUG raise e _j = julia.Julia() try: self._index_function = _j.evalfile(\"Policies/UCBjulia.jl\") except RuntimeError: try:", "not speed up anything, as there is a lot of overhead in the", ":math:`N_k(t)` pulls of arm k: .. math:: I_k(t) = \\frac{X_k(t)}{N_k(t)} + \\sqrt{\\frac{2 \\log(t)}{N_k(t)}}.", "raise ValueError(\"Error: Unable to load 'UCBjulia.jl' julia file.\") # WARNING try: self._index_function([1], [1],", "with 'pip install julia', or see https://github.com/JuliaPy/pyjulia/\") # DEBUG raise e _j =", "The UCB policy for bounded bandits, with UCB indexes computed with Julia. Reference:", "WARNING: this is a HUGE hack to fix a mystery bug on importing", "class UCBjulia(IndexPolicy): \"\"\" The UCB policy for bounded bandits, with UCB indexes computed" ]
[ "of correlated events required to allow a particular event through the filter 'max_cluster_size':", "\"\"\"Test of pmd_consumer functionality, with a selection of data.\"\"\" from os.path import join,", "from async_cv.play_file import play_file from async_cv.event_processing.pmd_consumer import pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root =", "run_name = setting+f'{group}_run_{test:02d}' data_path = join(expanduser('~\\\\'), data_root, join( group, files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'),", "33, pmd_consumer, run_name=run_name, video_out=True, targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters ) def run_group(group,", "6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4' }, 'june_26': { 'boat_tests': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4',", "events from buffer 'buffer_flush_period': 20_000, 'num_analyzers': 32, 'sample_period': 100_000, # microseconds between each", "setting=''): run_name = setting+f'{group}_run_{test:02d}' data_path = join(expanduser('~\\\\'), data_root, join( group, files[group]['boat_tests'][test])) annot_path =", "2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml'", "'short_duration': 2_000_000, #3_000_000, 'detection_tau': -0.002, 'ratio_threshold': 0, 'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor,", "100_000, # number of events to remember for each (x, y) position 'event_buffer_depth':", "'.aedat4' }, 'june_26': { 'boat_tests': { # 2: 
'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4:", "#5_000_000, 'short_duration': 2_000_000, #3_000_000, 'detection_tau': -0.002, 'ratio_threshold': 0, 'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor':", "import play_file from async_cv.event_processing.pmd_consumer import pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files", "'75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml',", ") def run_group(group, setting=''): for test in files[group]['boat_tests'].keys(): run_one(group, test, setting) def run_all(setting=''):", "'us_per_event': 50, # processing time alloted to each event handler to process events", "# how far back in time to consider events for filtering 'tc': 200_000,", "parameters parameters = { 'x_div': 4, # number of horizontal divisions 'y_div': 4,", "remember for each (x, y) position 'event_buffer_depth': 8, 'tf': 200_000, # how far", "-0.002, 'ratio_threshold': 0, 'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor, } run_group('june_12', f'{factor:03}/') run_group('june_26',", "targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters ) def run_group(group, setting=''): for test in", "expanduser from async_cv.play_file import play_file from async_cv.event_processing.pmd_consumer import pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root", "= 
'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = { 'june_12': { 'boat_tests': { 2:", "filtering 'tc': 200_000, # how far back in time to consider events for", "4: 'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw', # 6: 'out_2021-04-29_18-10-59.raw', # 7: 'out_2021-04-29_18-17-21.raw', # 8:", "show_metrics=False, parameters=parameters ) def run_group(group, setting=''): for test in files[group]['boat_tests'].keys(): run_one(group, test, setting)", "4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': { 0:", "to consider events for filtering 'tc': 200_000, # how far back in time", "factor, } run_group('june_12', f'{factor:03}/') run_group('june_26', f'{factor:03}/') run_group('april_12', f'{factor:03}/') # run_all() # run_one('june_12', 6)", "{ 'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4',", "position 'event_buffer_depth': 8, 'tf': 200_000, # how far back in time to consider", "data.\"\"\" from os.path import join, expanduser from async_cv.play_file import play_file from async_cv.event_processing.pmd_consumer import", "for factor in range(0, 1010, 10): # Define PMD parameters parameters = {", "#3_000_000, 'detection_tau': -0.002, 'ratio_threshold': 0, 'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor, } run_group('june_12',", "play_file from async_cv.event_processing.pmd_consumer import pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' 
annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files =", "setting+f'{group}_run_{test:02d}' data_path = join(expanduser('~\\\\'), data_root, join( group, files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'), annot_root, join(", "'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5:", "2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml'", "'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' },", "annot_path = join(expanduser('~\\\\'), annot_root, join( group, files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer, run_name=run_name, video_out=True, targets=['vessel',", "cluster to each event # microseconds periodicity to flush expired (>tc) 
events from", "}, 'annotations': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format':", "maximum taxicab dist from center of cluster to each event # microseconds periodicity", "taxicab dist from center of cluster to each event # microseconds periodicity to", "number of events to remember for each (x, y) position 'event_buffer_depth': 8, 'tf':", "3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations':", "of cluster to each event # microseconds periodicity to flush expired (>tc) events", "def run_all(setting=''): for group in files: run_group(group, setting) for factor in range(0, 1010,", "'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw', # 6: 'out_2021-04-29_18-10-59.raw', # 7: 'out_2021-04-29_18-17-21.raw',", "# 'april_29': { # 1: 'out_2021-04-29_17-56-14.raw', # 2: 'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw', #", "50, # processing time alloted to each event handler to process events 'temporal_filter':", "particular event through the filter 'max_cluster_size': 30, # maximum taxicab dist from center", "a particular event through the filter 'max_cluster_size': 30, # maximum taxicab dist from", "'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor, } run_group('june_12', f'{factor:03}/') run_group('june_26', f'{factor:03}/') run_group('april_12', f'{factor:03}/') # run_all()", "for clustering 'n': 4, # minimum number of correlated events required to allow", "5: 'out_2021-04-29_18-06-47.raw', # 
6: 'out_2021-04-29_18-10-59.raw', # 7: 'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw' # },", "# microseconds between each centroid position sample 'long_duration': 3_000_000, #5_000_000, 'short_duration': 2_000_000, #3_000_000,", "divisions 'us_per_event': 50, # processing time alloted to each event handler to process", "'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor, } run_group('june_12', f'{factor:03}/') run_group('june_26', f'{factor:03}/') run_group('april_12', f'{factor:03}/')", "selection of data.\"\"\" from os.path import join, expanduser from async_cv.play_file import play_file from", "5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4' }, 'june_26': { 'boat_tests': { #", "range(0, 1010, 10): # Define PMD parameters parameters = { 'x_div': 4, #", "2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4'", "# how far back in time to consider events for clustering 'n': 4,", "'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' },", "3: 'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw', # 6: 'out_2021-04-29_18-10-59.raw', # 7:", "to consider events for clustering 'n': 4, # minimum number of correlated events", "time alloted to each event handler to process events 
'temporal_filter': 100_000, # number", "}, 'annotations': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml',", "6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2:", "{ 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': { 2:", "'25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4',", "the filter 'max_cluster_size': 30, # maximum taxicab dist from center of cluster to", "# 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3:", "6: 'out_2021-04-29_18-10-59.raw', # 7: 'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw' # }, } def run_one(group,", "}, 'june_26': { 'boat_tests': { # 2: 
'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4',", "setting=''): for test in files[group]['boat_tests'].keys(): run_one(group, test, setting) def run_all(setting=''): for group in", "to flush expired (>tc) events from buffer 'buffer_flush_period': 20_000, 'num_analyzers': 32, 'sample_period': 100_000,", "'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml',", "events to remember for each (x, y) position 'event_buffer_depth': 8, 'tf': 200_000, #", "} }, # 'april_29': { # 1: 'out_2021-04-29_17-56-14.raw', # 2: 'out_2021-04-29_17-57-47.raw', # 3:", "1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor, } run_group('june_12', f'{factor:03}/') run_group('june_26', f'{factor:03}/') run_group('april_12', f'{factor:03}/') #", "'temporal_filter': 100_000, # number of events to remember for each (x, y) position", "time to consider events for clustering 'n': 4, # minimum number of correlated", "# 2: 'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw', #", "far back in time to consider events for clustering 'n': 4, # minimum", "6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml'", "PMD parameters parameters = { 'x_div': 4, # number of horizontal divisions 'y_div':", "(>tc) events 
from buffer 'buffer_flush_period': 20_000, 'num_analyzers': 32, 'sample_period': 100_000, # microseconds between", "how far back in time to consider events for filtering 'tc': 200_000, #", "position sample 'long_duration': 3_000_000, #5_000_000, 'short_duration': 2_000_000, #3_000_000, 'detection_tau': -0.002, 'ratio_threshold': 0, 'dot_ratio_threshold':", "4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, # 'april_29':", "alloted to each event handler to process events 'temporal_filter': 100_000, # number of", "# 7: 'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw' # }, } def run_one(group, test, setting=''):", "how far back in time to consider events for clustering 'n': 4, #", "minimum number of correlated events required to allow a particular event through the", "run_one(group, test, setting) def run_all(setting=''): for group in files: run_group(group, setting) for factor", "events for filtering 'tc': 200_000, # how far back in time to consider", "9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', #", "3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: 
'75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } },", "y) position 'event_buffer_depth': 8, 'tf': 200_000, # how far back in time to", "{ # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4',", "back in time to consider events for filtering 'tc': 200_000, # how far", "'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4' }, 'april_12': {", "# number of events to remember for each (x, y) position 'event_buffer_depth': 8,", "event # microseconds periodicity to flush expired (>tc) events from buffer 'buffer_flush_period': 20_000,", "factor in range(0, 1010, 10): # Define PMD parameters parameters = { 'x_div':", "to each event # microseconds periodicity to flush expired (>tc) events from buffer", "1: 'out_2021-04-29_17-56-14.raw', # 2: 'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw', # 5:", "center of cluster to each event # microseconds periodicity to flush expired (>tc)", "'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4' }, 'june_26': { 'boat_tests': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3:", "'out_2021-04-29_17-56-14.raw', # 2: 'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw',", "join( group, files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'), annot_root, join( group, 
files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer,", "in files: run_group(group, setting) for factor in range(0, 1010, 10): # Define PMD", "'tf': 200_000, # how far back in time to consider events for filtering", "'max_cluster_size': 30, # maximum taxicab dist from center of cluster to each event", "'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4' }, 'june_26': { 'boat_tests': { # 2:", "(x, y) position 'event_buffer_depth': 8, 'tf': 200_000, # how far back in time", "from center of cluster to each event # microseconds periodicity to flush expired", "in time to consider events for filtering 'tc': 200_000, # how far back", "4, # number of vertical divisions 'us_per_event': 50, # processing time alloted to", "= { 'june_12': { 'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6:", "'sample_period': 100_000, # microseconds between each centroid position sample 'long_duration': 3_000_000, #5_000_000, 'short_duration':", "32, 'sample_period': 100_000, # microseconds between each centroid position sample 'long_duration': 3_000_000, #5_000_000,", "10): # Define PMD parameters parameters = { 'x_div': 4, # number of", "group, files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer, run_name=run_name, video_out=True, targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters", "'april_12': { 'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4:", "each centroid position sample 'long_duration': 3_000_000, #5_000_000, 'short_duration': 2_000_000, #3_000_000, 'detection_tau': -0.002, 
'ratio_threshold':", "'.aedat4' }, 'april_12': { 'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3:", "annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = { 'june_12': { 'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3:", "{ 'june_12': { 'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4'", "dist from center of cluster to each event # microseconds periodicity to flush", "through the filter 'max_cluster_size': 30, # maximum taxicab dist from center of cluster", "processing time alloted to each event handler to process events 'temporal_filter': 100_000, #", "vertical divisions 'us_per_event': 50, # processing time alloted to each event handler to", "0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6:", "# maximum taxicab dist from center of cluster to each event # microseconds", "'annotations': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9:", "run_group(group, setting) for factor in range(0, 1010, 10): # Define PMD parameters parameters", "'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw' # }, } def run_one(group, test, setting=''): run_name =", "to process events 
'temporal_filter': 100_000, # number of events to remember for each", "0, 'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor, } run_group('june_12', f'{factor:03}/') run_group('june_26', f'{factor:03}/') run_group('april_12',", "'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6:", "30, # maximum taxicab dist from center of cluster to each event #", "number of correlated events required to allow a particular event through the filter", "# microseconds periodicity to flush expired (>tc) events from buffer 'buffer_flush_period': 20_000, 'num_analyzers':", "# 5: 'out_2021-04-29_18-06-47.raw', # 6: 'out_2021-04-29_18-10-59.raw', # 7: 'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw' #", "to allow a particular event through the filter 'max_cluster_size': 30, # maximum taxicab", "events for clustering 'n': 4, # minimum number of correlated events required to", "group, files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'), annot_root, join( group, files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer, run_name=run_name,", "'75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml',", "'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': { 
2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5:", "files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'), annot_root, join( group, files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer, run_name=run_name, video_out=True,", "for filtering 'tc': 200_000, # how far back in time to consider events", "in range(0, 1010, 10): # Define PMD parameters parameters = { 'x_div': 4,", "'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5:", "import join, expanduser from async_cv.play_file import play_file from async_cv.event_processing.pmd_consumer import pmd_consumer data_root =", "6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3:", "5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1:", "setting) def run_all(setting=''): for group in files: run_group(group, setting) for factor in range(0,", "in files[group]['boat_tests'].keys(): run_one(group, test, setting) def run_all(setting=''): for group in files: run_group(group, setting)", "'75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, # 'april_29': { # 1: 
'out_2021-04-29_17-56-14.raw', #", "from buffer 'buffer_flush_period': 20_000, 'num_analyzers': 32, 'sample_period': 100_000, # microseconds between each centroid", "'75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, #", "with a selection of data.\"\"\" from os.path import join, expanduser from async_cv.play_file import", "'x_div': 4, # number of horizontal divisions 'y_div': 4, # number of vertical", "clustering 'n': 4, # minimum number of correlated events required to allow a", "1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', #", "pmd_consumer, run_name=run_name, video_out=True, targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters ) def run_group(group, setting=''):", "for test in files[group]['boat_tests'].keys(): run_one(group, test, setting) def run_all(setting=''): for group in files:", "# 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21:", "'boat', 'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters ) def 
run_group(group, setting=''): for test in files[group]['boat_tests'].keys():", "'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4'", "each event # microseconds periodicity to flush expired (>tc) events from buffer 'buffer_flush_period':", "'75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4:", "run_one(group, test, setting=''): run_name = setting+f'{group}_run_{test:02d}' data_path = join(expanduser('~\\\\'), data_root, join( group, files[group]['boat_tests'][test]))", "horizontal divisions 'y_div': 4, # number of vertical divisions 'us_per_event': 50, # processing", "7: 'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw' # }, } def run_one(group, test, setting=''): run_name", "handler to process events 'temporal_filter': 100_000, # number of events to remember for", "microseconds periodicity to flush expired (>tc) events from buffer 'buffer_flush_period': 20_000, 'num_analyzers': 32,", "events 'temporal_filter': 100_000, # number of events to remember for each (x, y)", "21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml',", "allow a 
particular event through the filter 'max_cluster_size': 30, # maximum taxicab dist", "microseconds between each centroid position sample 'long_duration': 3_000_000, #5_000_000, 'short_duration': 2_000_000, #3_000_000, 'detection_tau':", "'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' },", "event handler to process events 'temporal_filter': 100_000, # number of events to remember", "3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format':", "{ 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4',", "200_000, # how far back in time to consider events for clustering 'n':", "join(expanduser('~\\\\'), annot_root, join( group, files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer, run_name=run_name, video_out=True, targets=['vessel', 'boat', 'RHIB'],", "} def run_one(group, test, setting=''): run_name = setting+f'{group}_run_{test:02d}' data_path = join(expanduser('~\\\\'), data_root, join(", "0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: 
'25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6:", "test, setting) def run_all(setting=''): for group in files: run_group(group, setting) for factor in", "'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': {", "of pmd_consumer functionality, with a selection of data.\"\"\" from os.path import join, expanduser", "files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer, run_name=run_name, video_out=True, targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters )", "each event handler to process events 'temporal_filter': 100_000, # number of events to", "20_000, 'num_analyzers': 32, 'sample_period': 100_000, # microseconds between each centroid position sample 'long_duration':", "{ 'x_div': 4, # number of horizontal divisions 'y_div': 4, # number of", "consider events for clustering 'n': 4, # minimum number of correlated events required", "'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters ) def run_group(group, setting=''): for test in files[group]['boat_tests'].keys(): run_one(group,", "run_name=run_name, video_out=True, targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters ) def run_group(group, setting=''): for", "1010, 10): # Define 
PMD parameters parameters = { 'x_div': 4, # number", "2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4' }, 'june_26':", "events required to allow a particular event through the filter 'max_cluster_size': 30, #", "'75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, # 'april_29': {", "play_file(data_path, 33, pmd_consumer, run_name=run_name, video_out=True, targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters ) def", "4, # minimum number of correlated events required to allow a particular event", "async_cv.event_processing.pmd_consumer import pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = { 'june_12':", "'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = { 'june_12': { 'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4',", "'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6:", "of horizontal divisions 'y_div': 4, # number of vertical divisions 'us_per_event': 50, #", "of vertical divisions 'us_per_event': 50, # processing time alloted to 
each event handler", "divisions 'y_div': 4, # number of vertical divisions 'us_per_event': 50, # processing time", "200_000, # how far back in time to consider events for filtering 'tc':", "'data_format': '.aedat4' }, 'april_12': { 'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4',", "= join(expanduser('~\\\\'), annot_root, join( group, files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer, run_name=run_name, video_out=True, targets=['vessel', 'boat',", "# }, } def run_one(group, test, setting=''): run_name = setting+f'{group}_run_{test:02d}' data_path = join(expanduser('~\\\\'),", "'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = { 'june_12': { 'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5:", "4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': { # 2:", "'dot_ratio_stability_factor': factor, } run_group('june_12', f'{factor:03}/') run_group('june_26', f'{factor:03}/') run_group('april_12', f'{factor:03}/') # run_all() # run_one('june_12',", "pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = { 'june_12': { 'boat_tests':", "files[group]['boat_tests'].keys(): run_one(group, test, setting) def run_all(setting=''): for group in files: run_group(group, setting) for", "to remember for each (x, y) position 'event_buffer_depth': 8, 'tf': 200_000, # how", "'out_2021-04-29_18-06-47.raw', # 6: 'out_2021-04-29_18-10-59.raw', # 7: 
'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw' # }, }", "periodicity to flush expired (>tc) events from buffer 'buffer_flush_period': 20_000, 'num_analyzers': 32, 'sample_period':", "# 8: 'out_2021-04-29_18-20-10.raw' # }, } def run_one(group, test, setting=''): run_name = setting+f'{group}_run_{test:02d}'", "'n': 4, # minimum number of correlated events required to allow a particular", "}, 'data_format': '.aedat4' }, 'april_12': { 'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2:", "4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4' }, 'april_12':", "for each (x, y) position 'event_buffer_depth': 8, 'tf': 200_000, # how far back", "run_all(setting=''): for group in files: run_group(group, setting) for factor in range(0, 1010, 10):", "'annotations': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4'", "'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw', # 6: 'out_2021-04-29_18-10-59.raw',", "'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw', # 6: 'out_2021-04-29_18-10-59.raw', # 7: 'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw'", "each (x, y) position 'event_buffer_depth': 8, 'tf': 200_000, # how far back in", "{ 'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 
'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations':", "'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4' }, 'april_12': { 'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4',", "for group in files: run_group(group, setting) for factor in range(0, 1010, 10): #", "between each centroid position sample 'long_duration': 3_000_000, #5_000_000, 'short_duration': 2_000_000, #3_000_000, 'detection_tau': -0.002,", "'ratio_threshold': 0, 'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor, } run_group('june_12', f'{factor:03}/') run_group('june_26', f'{factor:03}/')", "of events to remember for each (x, y) position 'event_buffer_depth': 8, 'tf': 200_000,", "back in time to consider events for clustering 'n': 4, # minimum number", "100_000, # microseconds between each centroid position sample 'long_duration': 3_000_000, #5_000_000, 'short_duration': 2_000_000,", "a selection of data.\"\"\" from os.path import join, expanduser from async_cv.play_file import play_file", "in time to consider events for clustering 'n': 4, # minimum number of", "# number of vertical divisions 'us_per_event': 50, # processing time alloted to each", "'75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, # 'april_29': { # 1: 'out_2021-04-29_17-56-14.raw', # 2: 'out_2021-04-29_17-57-47.raw', #", "test in files[group]['boat_tests'].keys(): run_one(group, test, setting) def run_all(setting=''): for group in files: run_group(group,", "data_root, join( group, files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'), annot_root, join( group, files[group]['annotations'][test])) play_file(data_path, 33,", "'tc': 200_000, # how far back in time to consider events for clustering", "async_cv.play_file import play_file from async_cv.event_processing.pmd_consumer import 
pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\'", "4, # number of horizontal divisions 'y_div': 4, # number of vertical divisions", "6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, # 'april_29': { # 1: 'out_2021-04-29_17-56-14.raw',", "'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4' }, 'june_26': { 'boat_tests': {", "1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', #", "6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4' }, 'april_12': { 'boat_tests':", "number of vertical divisions 'us_per_event': 50, # processing time alloted to each event", "to each event handler to process events 'temporal_filter': 100_000, # number of events", "8: 'out_2021-04-29_18-20-10.raw' # }, } def run_one(group, test, setting=''): run_name = setting+f'{group}_run_{test:02d}' data_path", "video_out=True, targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path, show_metrics=False, parameters=parameters ) def 
run_group(group, setting=''): for test", "2: 'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw', # 6:", "data_path = join(expanduser('~\\\\'), data_root, join( group, files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'), annot_root, join( group,", "3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations':", "from async_cv.event_processing.pmd_consumer import pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = {", "= { 'x_div': 4, # number of horizontal divisions 'y_div': 4, # number", "2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4'", "def run_group(group, setting=''): for test in files[group]['boat_tests'].keys(): run_one(group, test, setting) def run_all(setting=''): for", "buffer 'buffer_flush_period': 20_000, 'num_analyzers': 32, 'sample_period': 100_000, # microseconds between each centroid position", "'num_analyzers': 32, 'sample_period': 100_000, # microseconds between each centroid position sample 'long_duration': 3_000_000,", "import pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = { 'june_12': {", "2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 
'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml',", "test, setting=''): run_name = setting+f'{group}_run_{test:02d}' data_path = join(expanduser('~\\\\'), data_root, join( group, files[group]['boat_tests'][test])) annot_path", "5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, # 'april_29': { #", "far back in time to consider events for filtering 'tc': 200_000, # how", "'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3:", "def run_one(group, test, setting=''): run_name = setting+f'{group}_run_{test:02d}' data_path = join(expanduser('~\\\\'), data_root, join( group,", "group in files: run_group(group, setting) for factor in range(0, 1010, 10): # Define", "parameters=parameters ) def run_group(group, setting=''): for test in files[group]['boat_tests'].keys(): run_one(group, test, setting) def", "# minimum number of correlated events required to allow a particular event through", "expired (>tc) events from buffer 'buffer_flush_period': 20_000, 'num_analyzers': 32, 'sample_period': 100_000, # microseconds", "'detection_tau': -0.002, 'ratio_threshold': 0, 'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor, } run_group('june_12', f'{factor:03}/')", "'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 
'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4' }, 'june_26': {", "# number of horizontal divisions 'y_div': 4, # number of vertical divisions 'us_per_event':", "join, expanduser from async_cv.play_file import play_file from async_cv.event_processing.pmd_consumer import pmd_consumer data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\'", "of data.\"\"\" from os.path import join, expanduser from async_cv.play_file import play_file from async_cv.event_processing.pmd_consumer", "# Define PMD parameters parameters = { 'x_div': 4, # number of horizontal", "annot_file=annot_path, show_metrics=False, parameters=parameters ) def run_group(group, setting=''): for test in files[group]['boat_tests'].keys(): run_one(group, test,", "'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': {", "# 3: 'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw', # 6: 'out_2021-04-29_18-10-59.raw', #", "3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4' }, 'june_26': { 'boat_tests':", "{ # 1: 'out_2021-04-29_17-56-14.raw', # 2: 'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw',", "'25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: 
'75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml',", "os.path import join, expanduser from async_cv.play_file import play_file from async_cv.event_processing.pmd_consumer import pmd_consumer data_root", "data_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' annot_root = 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = { 'june_12': { 'boat_tests': {", "'25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7:", "# 1: 'out_2021-04-29_17-56-14.raw', # 2: 'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw', # 4: 'out_2021-04-29_18-04-41.raw', #", "number of horizontal divisions 'y_div': 4, # number of vertical divisions 'us_per_event': 50,", "'75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': {", "'long_duration': 3_000_000, #5_000_000, 'short_duration': 2_000_000, #3_000_000, 'detection_tau': -0.002, 'ratio_threshold': 0, 'dot_ratio_threshold': 1.0, 
'ratio_stability_factor':", "join(expanduser('~\\\\'), data_root, join( group, files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'), annot_root, join( group, files[group]['annotations'][test])) play_file(data_path,", "'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4' }, 'april_12': { 'boat_tests': {", "Define PMD parameters parameters = { 'x_div': 4, # number of horizontal divisions", "'june_26': { 'boat_tests': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6:", "correlated events required to allow a particular event through the filter 'max_cluster_size': 30,", "}, 'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml',", "files: run_group(group, setting) for factor in range(0, 1010, 10): # Define PMD parameters", "'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4:", "# 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, # 'april_29': { # 1: 'out_2021-04-29_17-56-14.raw', # 2:", "'april_29': { # 1: 'out_2021-04-29_17-56-14.raw', # 
2: 'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw', # 4:", "= setting+f'{group}_run_{test:02d}' data_path = join(expanduser('~\\\\'), data_root, join( group, files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'), annot_root,", "= 'OneDrive\\\\Documents\\\\NIWC\\\\NeuroComp\\\\boat_tests\\\\' files = { 'june_12': { 'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4',", "run_group(group, setting=''): for test in files[group]['boat_tests'].keys(): run_one(group, test, setting) def run_all(setting=''): for group", "}, 'data_format': '.aedat4' }, 'june_26': { 'boat_tests': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4',", "{ 'boat_tests': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4',", "process events 'temporal_filter': 100_000, # number of events to remember for each (x,", "'june_12': { 'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' },", "annot_root, join( group, files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer, run_name=run_name, video_out=True, targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path,", "'data_format': '.aedat4' }, 'june_26': { 'boat_tests': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', #", "'buffer_flush_period': 20_000, 'num_analyzers': 32, 'sample_period': 100_000, # microseconds between each 
centroid position sample", "'boat_tests': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.aedat4', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.aedat4', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9:", "7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, # 'april_29': { # 1: 'out_2021-04-29_17-56-14.raw', # 2: 'out_2021-04-29_17-57-47.raw',", "{ # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml',", "# 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml', 3: 'Davis346red-2020-06-26T12-27-39-0700-00000195-0_Test_3.xml', # 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21:", "21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4' }, 'april_12': { 'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1:", "{ 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.xml' }, 'data_format': '.aedat4' },", "centroid position sample 'long_duration': 3_000_000, #5_000_000, 'short_duration': 2_000_000, #3_000_000, 'detection_tau': -0.002, 'ratio_threshold': 0,", "# 6: 'out_2021-04-29_18-10-59.raw', # 7: 'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw' # }, } def", "}, } def run_one(group, test, setting=''): run_name = setting+f'{group}_run_{test:02d}' 
data_path = join(expanduser('~\\\\'), data_root,", "2_000_000, #3_000_000, 'detection_tau': -0.002, 'ratio_threshold': 0, 'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0, 'dot_ratio_stability_factor': factor, }", "{ 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml',", "event through the filter 'max_cluster_size': 30, # maximum taxicab dist from center of", "}, 'april_12': { 'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4',", "9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4' }, 'april_12': { 'boat_tests': { 0:", "7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47-2021_06_03_21_30_33-cvat+for+video+1.1.xml', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml',", "filter 'max_cluster_size': 30, # maximum taxicab dist from center of cluster to each", "files = { 'june_12': { 'boat_tests': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.aedat4', 3: 
'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4',", "8, 'tf': 200_000, # how far back in time to consider events for", "pmd_consumer functionality, with a selection of data.\"\"\" from os.path import join, expanduser from", "'75mm-1500us-drifting-boat-2021_04_12_15_35_24-2021_06_03_21_50_58-cvat+for+video+1.1.xml', 3: '75mm-2000us-boat2-2021_04_12_15_21_16-2021_06_03_22_21_59-cvat+for+video+1.1.xml', 4: '75mm-2000us-boat3-2021_04_12_15_30_50-2021_06_03_22_55_50-cvat+for+video+1.1.xml', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' }", "parameters = { 'x_div': 4, # number of horizontal divisions 'y_div': 4, #", "3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.aedat4', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml',", "1.0, 'dot_ratio_stability_factor': factor, } run_group('june_12', f'{factor:03}/') run_group('june_26', f'{factor:03}/') run_group('april_12', f'{factor:03}/') # run_all() #", "consider events for filtering 'tc': 200_000, # how far back in time to", "'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': { # 2: 'Davis346red-2020-06-26T12-26-42-0700-00000195-0_Test_2.xml',", "# processing time alloted to each event handler to process events 'temporal_filter': 100_000,", "'out_2021-04-29_18-10-59.raw', # 7: 
'out_2021-04-29_18-17-21.raw', # 8: 'out_2021-04-29_18-20-10.raw' # }, } def run_one(group, test,", "'75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' },", "join( group, files[group]['annotations'][test])) play_file(data_path, 33, pmd_consumer, run_name=run_name, video_out=True, targets=['vessel', 'boat', 'RHIB'], annot_file=annot_path, show_metrics=False,", "# 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.aedat4', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.aedat4', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.aedat4', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.aedat4' }, 'annotations': { #", "# 4: 'out_2021-04-29_18-04-41.raw', # 5: 'out_2021-04-29_18-06-47.raw', # 6: 'out_2021-04-29_18-10-59.raw', # 7: 'out_2021-04-29_18-17-21.raw', #", "sample 'long_duration': 3_000_000, #5_000_000, 'short_duration': 2_000_000, #3_000_000, 'detection_tau': -0.002, 'ratio_threshold': 0, 'dot_ratio_threshold': 1.0,", "'y_div': 4, # number of vertical divisions 'us_per_event': 50, # processing time alloted", "# 4: 'Davis346red-2020-06-26T12-28-38-0700-00000195-0_Test_4.xml', 6: 'Davis346red-2020-06-26T12-30-20-0700-00000195-0_Test_6.xml', 9: 'Davis346red-2020-06-26T12-32-12-0700-00000195-0_Test_9.xml', 21: 'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4' },", "time to consider events for filtering 'tc': 200_000, # how far back in", "3_000_000, #5_000_000, 'short_duration': 2_000_000, #3_000_000, 'detection_tau': -0.002, 'ratio_threshold': 0, 'dot_ratio_threshold': 1.0, 'ratio_stability_factor': 1.0,", "from os.path import join, expanduser from async_cv.play_file import play_file from 
async_cv.event_processing.pmd_consumer import pmd_consumer", "setting) for factor in range(0, 1010, 10): # Define PMD parameters parameters =", "'Davis346red-2020-06-26T13-22-40-0700-00000195-0_Test_21.xml' }, 'data_format': '.aedat4' }, 'april_12': { 'boat_tests': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24.aedat4', 1: '25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4',", "'out_2021-04-29_18-20-10.raw' # }, } def run_one(group, test, setting=''): run_name = setting+f'{group}_run_{test:02d}' data_path =", "5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.aedat4', 6: 'Davis346red-2020-06-12T12-25-39-0700-0_Test_6.aedat4' }, 'annotations': { 2: 'Davis346red-2020-06-12T12-11-45-0700-0_Test_2.xml', 3: 'Davis346red-2020-06-12T12-15-01-0700-0_Test_3.xml', 5: 'Davis346red-2020-06-12T12-24-03-0700-0_Test_5.xml',", "'75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01.aedat4' }, 'annotations': { 0: '25mm-1000us-speedboat-2021_04_12_15_09_24-2021_06_03_18_58_28-cvat+for+video+1.1.xml',", "flush expired (>tc) events from buffer 'buffer_flush_period': 20_000, 'num_analyzers': 32, 'sample_period': 100_000, #", "functionality, with a selection of data.\"\"\" from os.path import join, expanduser from async_cv.play_file", "required to allow a particular event through the filter 'max_cluster_size': 30, # maximum", "}, # 'april_29': { # 1: 'out_2021-04-29_17-56-14.raw', # 2: 'out_2021-04-29_17-57-47.raw', # 3: 'out_2021-04-29_18-02-48.raw',", "= join(expanduser('~\\\\'), data_root, join( group, files[group]['boat_tests'][test])) annot_path = join(expanduser('~\\\\'), annot_root, join( group, files[group]['annotations'][test]))", "'75mm-2000us-filter-boat-2021_04_12_15_16_43-2021_06_03_23_20_19-cvat+for+video+1.1.xml', 6: 
'75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24-2021_06_03_23_26_34-cvat+for+video+1.1.xml', # 7: '75mm-2000us-speedboat-2021_04_12_15_26_01-2021_06_07_15_08_31-cvat+for+video+1.1.xml' } }, # 'april_29': { # 1:", "'event_buffer_depth': 8, 'tf': 200_000, # how far back in time to consider events", "'25mm-1200us-drifting-boat-2021_04_12_15_33_47.aedat4', 2: '75mm-1500us-drifting-boat-2021_04_12_15_35_24.aedat4', 3: '75mm-2000us-boat2-2021_04_12_15_21_16.aedat4', 4: '75mm-2000us-boat3-2021_04_12_15_30_50.aedat4', 5: '75mm-2000us-filter-boat-2021_04_12_15_16_43.aedat4', 6: '75mm-2000us-on-off-filter-boat-2021_04_12_15_17_24.aedat4', # 7:" ]
[ "= \"http://api.sowemail.com:9000\" api_key = os.environ.get('SOWEMAIL_API_KEY') request_headers = { \"Authorization\": 'Bearer {}'.format(api_key) } version", "from SoWeMail\", \"content\": [ { \"type\": \"text/plain\", \"value\": \"Simple email sending example using", "{ \"Authorization\": 'Bearer {}'.format(api_key) } version = 1 client = sowerest.Client(host=host, request_headers=request_headers, version=version)", "] } ], \"from\": { \"email\": \"<EMAIL>\" }, \"subject\": \"Hello from SoWeMail\", \"content\":", "import sowerest host = \"http://api.sowemail.com:9000\" api_key = os.environ.get('SOWEMAIL_API_KEY') request_headers = { \"Authorization\": 'Bearer", "\"email\": \"<EMAIL>\" }, \"subject\": \"Hello from SoWeMail\", \"content\": [ { \"type\": \"text/plain\", \"value\":", "'Bearer {}'.format(api_key) } version = 1 client = sowerest.Client(host=host, request_headers=request_headers, version=version) # Send", "= sowerest.Client(host=host, request_headers=request_headers, version=version) # Send email data = { \"personalizations\": [ {", "version=version) # Send email data = { \"personalizations\": [ { \"to\": [ {", "\"to\": [ { \"email\": \"<EMAIL>\" } ] } ], \"from\": { \"email\": \"<EMAIL>\"", "\"email\": \"<EMAIL>\" } ] } ], \"from\": { \"email\": \"<EMAIL>\" }, \"subject\": \"Hello", "\"<EMAIL>\" }, \"subject\": \"Hello from SoWeMail\", \"content\": [ { \"type\": \"text/plain\", \"value\": \"Simple", "}, \"subject\": \"Hello from SoWeMail\", \"content\": [ { \"type\": \"text/plain\", \"value\": \"Simple email", "\"text/plain\", \"value\": \"Simple email sending example using python's sowerest library\" } ] }", "\"value\": \"Simple email sending example using python's sowerest library\" } ] } response", "\"Simple email sending example using python's sowerest library\" } ] } response =", "api_key = os.environ.get('SOWEMAIL_API_KEY') request_headers = { \"Authorization\": 'Bearer {}'.format(api_key) } version = 1", "<gh_stars>0 import os import sowerest 
host = \"http://api.sowemail.com:9000\" api_key = os.environ.get('SOWEMAIL_API_KEY') request_headers =", "\"type\": \"text/plain\", \"value\": \"Simple email sending example using python's sowerest library\" } ]", "\"Authorization\": 'Bearer {}'.format(api_key) } version = 1 client = sowerest.Client(host=host, request_headers=request_headers, version=version) #", "using python's sowerest library\" } ] } response = client.mail.send.post(request_body=data) print(response.status_code) print(response.headers) print(response.body)", "= os.environ.get('SOWEMAIL_API_KEY') request_headers = { \"Authorization\": 'Bearer {}'.format(api_key) } version = 1 client", "\"Hello from SoWeMail\", \"content\": [ { \"type\": \"text/plain\", \"value\": \"Simple email sending example", "{ \"email\": \"<EMAIL>\" }, \"subject\": \"Hello from SoWeMail\", \"content\": [ { \"type\": \"text/plain\",", "{ \"email\": \"<EMAIL>\" } ] } ], \"from\": { \"email\": \"<EMAIL>\" }, \"subject\":", "sending example using python's sowerest library\" } ] } response = client.mail.send.post(request_body=data) print(response.status_code)", "} version = 1 client = sowerest.Client(host=host, request_headers=request_headers, version=version) # Send email data", "Send email data = { \"personalizations\": [ { \"to\": [ { \"email\": \"<EMAIL>\"", "host = \"http://api.sowemail.com:9000\" api_key = os.environ.get('SOWEMAIL_API_KEY') request_headers = { \"Authorization\": 'Bearer {}'.format(api_key) }", "= { \"personalizations\": [ { \"to\": [ { \"email\": \"<EMAIL>\" } ] }", "\"http://api.sowemail.com:9000\" api_key = os.environ.get('SOWEMAIL_API_KEY') request_headers = { \"Authorization\": 'Bearer {}'.format(api_key) } version =", "{ \"to\": [ { \"email\": \"<EMAIL>\" } ] } ], \"from\": { \"email\":", "[ { \"email\": \"<EMAIL>\" } ] } ], \"from\": { \"email\": \"<EMAIL>\" },", "client = sowerest.Client(host=host, request_headers=request_headers, version=version) # Send email data = { \"personalizations\": [", 
"\"content\": [ { \"type\": \"text/plain\", \"value\": \"Simple email sending example using python's sowerest", "1 client = sowerest.Client(host=host, request_headers=request_headers, version=version) # Send email data = { \"personalizations\":", "\"<EMAIL>\" } ] } ], \"from\": { \"email\": \"<EMAIL>\" }, \"subject\": \"Hello from", "# Send email data = { \"personalizations\": [ { \"to\": [ { \"email\":", "= 1 client = sowerest.Client(host=host, request_headers=request_headers, version=version) # Send email data = {", "{}'.format(api_key) } version = 1 client = sowerest.Client(host=host, request_headers=request_headers, version=version) # Send email", "email data = { \"personalizations\": [ { \"to\": [ { \"email\": \"<EMAIL>\" }", "sowerest.Client(host=host, request_headers=request_headers, version=version) # Send email data = { \"personalizations\": [ { \"to\":", "} ] } ], \"from\": { \"email\": \"<EMAIL>\" }, \"subject\": \"Hello from SoWeMail\",", "{ \"personalizations\": [ { \"to\": [ { \"email\": \"<EMAIL>\" } ] } ],", "= { \"Authorization\": 'Bearer {}'.format(api_key) } version = 1 client = sowerest.Client(host=host, request_headers=request_headers,", "[ { \"type\": \"text/plain\", \"value\": \"Simple email sending example using python's sowerest library\"", "import os import sowerest host = \"http://api.sowemail.com:9000\" api_key = os.environ.get('SOWEMAIL_API_KEY') request_headers = {", "sowerest host = \"http://api.sowemail.com:9000\" api_key = os.environ.get('SOWEMAIL_API_KEY') request_headers = { \"Authorization\": 'Bearer {}'.format(api_key)", "\"personalizations\": [ { \"to\": [ { \"email\": \"<EMAIL>\" } ] } ], \"from\":", "} ], \"from\": { \"email\": \"<EMAIL>\" }, \"subject\": \"Hello from SoWeMail\", \"content\": [", "request_headers=request_headers, version=version) # Send email data = { \"personalizations\": [ { \"to\": [", "], \"from\": { \"email\": \"<EMAIL>\" }, \"subject\": \"Hello from SoWeMail\", \"content\": [ {", "version = 1 client = 
sowerest.Client(host=host, request_headers=request_headers, version=version) # Send email data =", "example using python's sowerest library\" } ] } response = client.mail.send.post(request_body=data) print(response.status_code) print(response.headers)", "[ { \"to\": [ { \"email\": \"<EMAIL>\" } ] } ], \"from\": {", "os.environ.get('SOWEMAIL_API_KEY') request_headers = { \"Authorization\": 'Bearer {}'.format(api_key) } version = 1 client =", "\"from\": { \"email\": \"<EMAIL>\" }, \"subject\": \"Hello from SoWeMail\", \"content\": [ { \"type\":", "email sending example using python's sowerest library\" } ] } response = client.mail.send.post(request_body=data)", "os import sowerest host = \"http://api.sowemail.com:9000\" api_key = os.environ.get('SOWEMAIL_API_KEY') request_headers = { \"Authorization\":", "request_headers = { \"Authorization\": 'Bearer {}'.format(api_key) } version = 1 client = sowerest.Client(host=host,", "SoWeMail\", \"content\": [ { \"type\": \"text/plain\", \"value\": \"Simple email sending example using python's", "\"subject\": \"Hello from SoWeMail\", \"content\": [ { \"type\": \"text/plain\", \"value\": \"Simple email sending", "data = { \"personalizations\": [ { \"to\": [ { \"email\": \"<EMAIL>\" } ]", "{ \"type\": \"text/plain\", \"value\": \"Simple email sending example using python's sowerest library\" }" ]
[ "in group['params']: state = optimizer.state[p] if('step' in state and state['step']>=1024): state['step'] = 1000", "np.mean(test_accu) print(accuracy_test) # # In[51]: # #Calculate accuracy of trained model on the", "construct CIFAR-10 dataset. test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False) # Data loader (this", "# Mini-batch images and labels. images, labels = data_iter.next() # # In[ ]:", "objective function is the negative log-likelihood function. loss = F.nll_loss(output, target) #This calculates", "gradients (via backpropagation) loss.backward() train_loss.append(loss.data[0]) #The parameters for the model are updated using", "entire model. # torch.save(model, 'model.ckpt') # model = torch.load('model.ckpt') # In[ ]: #", ") self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d() self.conv9 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2", "model(data).data for i in range(50): output[i,:,:] = output[i,:,:] / (i+1) # prediction[i] =", "# In[3]: # In[4]: #number of hidden units H = 500 #Model architecture", "= F.relu(self.fc2(x)) x = self.fc3(x) return F.log_softmax(x, dim=1) model = CIFAR10Model() model.cuda() #", "#Model architecture class CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model, self).__init__() # input is 3x32x32 #These", "F.max_pool2d(x, kernel_size=2,stride=2) # x = self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x)) #", "way). 
test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) # When iteration starts, queue and thread", "padding=2 ) self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d() self.fc1 = nn.Linear(64 * 5", "Model for epoch in range(num_epochs): train_accu = [] for images, labels in train_loader:", "CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model, self).__init__() # input is 3x32x32 #These variables store the", "CIFAR-10 dataset. train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) # Data", "randint import torch import torchvision import torchvision.transforms as transforms import torch.nn as nn", "target) prediction = output.data.max(1)[1] # first column has actual prob. accuracy = (", "nn.Dropout2d() self.fc1 = nn.Linear(64 * 5 * 5, H) self.fc2 = nn.Linear(H, H)", "model on the Test Set model.eval() test_accu = [] for images, labels in", "kernel_size=4,stride=1, padding=2 ) self.conv5_bn = nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 )", "way). 
train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) # When iteration starts, queue and thread", "prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test = np.mean(test_accu) print(accuracy_test) # # In[51]: # #Calculate", "self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d() self.fc1 = nn.Linear(64 * 5 * 5,", "In[1]: import numpy as np import h5py import time import copy from random", "x = self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x))) # x = self.conv9_drop(x) x = x.view(x.size(0),", "= F.relu(self.conv8_bn(self.conv8(x))) # x = self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x))) # x = self.conv9_drop(x)", "output.data.max(1)[1] # first column has actual prob. accuracy = ( float( prediction.eq(target.data).sum() )", "target = Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) prediction", "torch.zeros((50,50,10)) prediction = torch.zeros((50,1)) accuracy = torch.zeros((50,1)) test_accu = [] for images, labels", "# # coding: utf-8 # # In[1]: import numpy as np import h5py", "stride=1, padding=2 ) self.conv1_bn = nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 )", "64, kernel_size=3,stride=1, padding=0 ) self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d() self.conv9 = nn.Conv2d(64,", "/ (i+1) # prediction[i] = output[i,:,:].data.max(1)[1] # first column has actual prob import", "#number of hidden units H = 500 #Model architecture class CIFAR10Model(nn.Module): def __init__(self):", "range(50): output[i,:,:] = output[i,:,:] / (i+1) # prediction[i] = output[i,:,:].data.max(1)[1] # first column", "accuracy of trained model on the Test Set # # model.eval() output =", "output[i,:,:] = output[i,:,:] / (i+1) # prediction[i] = output[i,:,:].data.max(1)[1] # first column has", "64, kernel_size=4,stride=1, padding=2 ) 
self.conv2_drop = nn.Dropout2d() self.conv3 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2", "# Download and construct CIFAR-10 dataset. test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False) #", "H) self.fc2 = nn.Linear(H, H) self.fc3 = nn.Linear(H, 10) def forward(self, x): #Here", "p in group['params']: state = optimizer.state[p] if('step' in state and state['step']>=1024): state['step'] =", "labels. images, labels = data_iter.next() # In[3]: # In[4]: #number of hidden units", "= F.nll_loss(output, target) prediction = output.data.max(1)[1] # first column has actual prob. accuracy", "print(accuracy_test) # # In[51]: # #Calculate accuracy of trained model on the Test", "import torch import torchvision import torchvision.transforms as transforms import torch.nn as nn import", "a very simple way). test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) # When iteration starts,", "actual prob. accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu)", "to load data from files. 
data_iter = iter(train_loader) # Mini-batch images and labels.", "h5py import time import copy from random import randint import torch import torchvision", "64, kernel_size=4, stride=1, padding=2 ) self.conv1_bn = nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64, 64, kernel_size=4,stride=1,", "= nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d() self.conv9 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv9_bn", "x = F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) # x =", "from random import randint import torch import torchvision import torchvision.transforms as transforms import", "= nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv5_bn = nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64, 64,", "so we need to set the stored gradients to zero when there’s a", "has actual prob import pdb; pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1) accuracy[i] = ( float(", "nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv2_drop = nn.Dropout2d() self.conv3 =", "x.view(x.size(0), -1) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = self.fc3(x) return F.log_softmax(x,", "actual prob import pdb; pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1) accuracy[i] = ( float( prediction[i].eq(target.data).sum()", "= Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) prediction =", "(this provides queues and threads in a very simple way). test_loader = torch.utils.data.DataLoader(dataset=test_dataset,", ") self.conv3_bn = nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv4_drop =", "queues and threads in a very simple way). test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False)", "to zero when there’s a new batch of data. 
optimizer.zero_grad() #Forward propagation of", "start to load data from files. data_iter = iter(test_loader) # Mini-batch images and", "loader (this provides queues and threads in a very simple way). train_loader =", "transforms import torch.nn as nn import torch.nn.functional as F import torch.optim as optim", "model = torch.load('model.ckpt') # In[ ]: # Download and construct CIFAR-10 dataset. test_dataset", "as optim from torch.autograd import Variable # In[2]: batch_size = 50 # Download", "self.conv7 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv7_bn = nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64,", "In[ ]: # Download and construct CIFAR-10 dataset. test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(),", "images and labels. images, labels = data_iter.next() # # In[ ]: #Calculate accuracy", "# Data loader (this provides queues and threads in a very simple way).", "simple way). test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) # When iteration starts, queue and", "loss.backward() train_loss.append(loss.data[0]) #The parameters for the model are updated using stochastic gradient descent.", "the model are updated using stochastic gradient descent. for group in optimizer.param_groups: for", "are updated using stochastic gradient descent. for group in optimizer.param_groups: for p in", "in range(50): output[i,:,:] = output[i,:,:] / (i+1) # prediction[i] = output[i,:,:].data.max(1)[1] # first", "Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:] = model(data).data for i in range(1,50): output[i,:,:] = output[i-1,:,:]", "Data loader (this provides queues and threads in a very simple way). 
train_loader", "labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output = model(data) loss", "the gradients (via backpropagation) loss.backward() train_loss.append(loss.data[0]) #The parameters for the model are updated", "= nn.Dropout2d() self.fc1 = nn.Linear(64 * 5 * 5, H) self.fc2 = nn.Linear(H,", "output = model(data) #The objective function is the negative log-likelihood function. loss =", "test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) # When iteration starts, queue and thread start", "In[ ]: #Calculate accuracy of trained model on the Test Set model.eval() test_accu", ") self.conv2_drop = nn.Dropout2d() self.conv3 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv3_bn =", "# coding: utf-8 # # In[1]: import numpy as np import h5py import", "self.conv9_drop(x) x = x.view(x.size(0), -1) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x =", "= F.relu(self.conv4(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) # x = self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x)))", "Variable # In[2]: batch_size = 50 # Download and construct CIFAR-10 dataset. train_dataset", "F.log_softmax(x, dim=1) model = CIFAR10Model() model.cuda() # In[5]: #Stochastic gradient descent optimizer optimizer", "batch_size = 50 # Download and construct CIFAR-10 dataset. train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True,", "stored gradients to zero when there’s a new batch of data. optimizer.zero_grad() #Forward", "= CIFAR10Model() model.cuda() # In[5]: #Stochastic gradient descent optimizer optimizer = optim.RMSprop(model.parameters(), lr=0.0001)", "= nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv2_drop = nn.Dropout2d() self.conv3 = nn.Conv2d(64, 64,", "[] # In[6]: #Train Model for epoch in range(num_epochs): train_accu = [] for", "propagation of the model, i.e. calculate the hidden units and the output. 
output", "Variable(labels).cuda() optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) prediction = output.data.max(1)[1] #", "on the training set. prediction = output.data.max(1)[1] # first column has actual prob.", "nn.Dropout2d() self.conv9 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop =", "F.nll_loss(output, target) #This calculates the gradients (via backpropagation) loss.backward() train_loss.append(loss.data[0]) #The parameters for", "prob. accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test = np.mean(test_accu) print(accuracy_test)", "dim=1) model = CIFAR10Model() model.cuda() # In[5]: #Stochastic gradient descent optimizer optimizer =", "= Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:] = model(data).data for i in range(1,50): output[i,:,:]", "output[i,:,:] = output[i-1,:,:] + model(data).data for i in range(50): output[i,:,:] = output[i,:,:] /", "kernel_size=4,stride=1, padding=2 ) self.conv4_drop = nn.Dropout2d() self.conv5 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 )", "batch_size=batch_size, shuffle=True) # When iteration starts, queue and thread start to load data", "of trained model on the Test Set model.eval() test_accu = [] for images,", "actual prob. accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test = np.mean(test_accu)", "x = self.conv9_drop(x) x = x.view(x.size(0), -1) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x))", "state and state['step']>=1024): state['step'] = 1000 optimizer.step() #Calculate accuracy on the training set.", "calculates the gradients (via backpropagation) loss.backward() train_loss.append(loss.data[0]) #The parameters for the model are", "gradient descent. 
for group in optimizer.param_groups: for p in group['params']: state = optimizer.state[p]", "of trained model on the Test Set # # model.eval() output = torch.zeros((50,50,10))", "# coding: utf-8 # In[ ]: # # coding: utf-8 # # In[1]:", "prob import pdb; pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1) accuracy[i] = ( float( prediction[i].eq(target.data).sum() )", "for i in range(1,50): output[i,:,:] = output[i-1,:,:] + model(data).data for i in range(50):", "# In[1]: import numpy as np import h5py import time import copy from", "labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:] = model(data).data", "in optimizer.param_groups: for p in group['params']: state = optimizer.state[p] if('step' in state and", "# When iteration starts, queue and thread start to load data from files.", "kernel_size=4,stride=1, padding=2 ) self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d() self.fc1 = nn.Linear(64 *", "output[i-1,:,:] + model(data).data for i in range(50): output[i,:,:] = output[i,:,:] / (i+1) #", "= F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x))) # x = self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x))) #", "In[51]: # #Calculate accuracy of trained model on the Test Set # #", "as transforms import torch.nn as nn import torch.nn.functional as F import torch.optim as", "trained model on the Test Set # # model.eval() output = torch.zeros((50,50,10)) prediction", "self.conv9 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d()", "64, kernel_size=4,stride=1, padding=2 ) self.conv3_bn = nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2", "# x = self.conv9_drop(x) x = x.view(x.size(0), -1) x = F.relu(self.fc1(x)) x =", "images, labels = data_iter.next() # # In[ ]: #Calculate accuracy of trained model", "queues and threads in a very simple 
way). train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True)", "input is 3x32x32 #These variables store the model parameters. self.conv1 = nn.Conv2d(3, 64,", "padding=2 ) self.conv1_bn = nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv2_drop", "accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu) print(epoch, accuracy_epoch)", "range(1,50): output[i,:,:] = output[i-1,:,:] + model(data).data for i in range(50): output[i,:,:] = output[i,:,:]", "Download and construct CIFAR-10 dataset. test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False) # Data", "print(epoch, accuracy_epoch) # # Save and load the entire model. # torch.save(model, 'model.ckpt')", "self.fc2 = nn.Linear(H, H) self.fc3 = nn.Linear(H, 10) def forward(self, x): #Here is", "train=False, transform=transforms.ToTensor(), download=False) # Data loader (this provides queues and threads in a", "= nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d() self.fc1 = nn.Linear(64 * 5 * 5, H)", "= iter(train_loader) # Mini-batch images and labels. 
images, labels = data_iter.next() # In[3]:", "+ model(data).data for i in range(50): output[i,:,:] = output[i,:,:] / (i+1) # prediction[i]", "x = F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) x = self.conv2_drop(x)", "Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:] = model(data).data for i in range(1,50): output[i,:,:] =", "import torchvision.transforms as transforms import torch.nn as nn import torch.nn.functional as F import", "in range(num_epochs): train_accu = [] for images, labels in train_loader: data, target =", "torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) # When iteration starts, queue and thread start to load", "target) #This calculates the gradients (via backpropagation) loss.backward() train_loss.append(loss.data[0]) #The parameters for the", "= output[i,:,:].data.max(1)[1] # first column has actual prob import pdb; pdb.set_trace() prediction[i] =", "data_iter.next() # In[3]: # In[4]: #number of hidden units H = 500 #Model", "variables store the model parameters. 
self.conv1 = nn.Conv2d(3, 64, kernel_size=4, stride=1, padding=2 )", "nn.Dropout2d() self.conv7 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv7_bn = nn.BatchNorm2d(64) self.conv8 =", "loss = F.nll_loss(output, target) #This calculates the gradients (via backpropagation) loss.backward() train_loss.append(loss.data[0]) #The", "[] for images, labels in train_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates", "train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) # Data loader (this provides queues and", "gradient descent optimizer optimizer = optim.RMSprop(model.parameters(), lr=0.0001) num_epochs = 1 model.train() train_loss =", ") self.conv5_bn = nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv6_drop =", "transform=transforms.ToTensor(), download=False) # Data loader (this provides queues and threads in a very", "self.conv9_drop = nn.Dropout2d() self.fc1 = nn.Linear(64 * 5 * 5, H) self.fc2 =", "range(num_epochs): train_accu = [] for images, labels in train_loader: data, target = Variable(images).cuda(),", "# #Calculate accuracy of trained model on the Test Set # # model.eval()", "and labels. 
images, labels = data_iter.next() # In[3]: # In[4]: #number of hidden", "__init__(self): super(CIFAR10Model, self).__init__() # input is 3x32x32 #These variables store the model parameters.", "coding: utf-8 # In[ ]: # # coding: utf-8 # # In[1]: import", "nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv2_drop = nn.Dropout2d() self.conv3 = nn.Conv2d(64, 64, kernel_size=4,stride=1,", "= [] for images, labels in train_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() #PyTorch", "torch.zeros((50,1)) accuracy = torch.zeros((50,1)) test_accu = [] for images, labels in test_loader: data,", "In[4]: #number of hidden units H = 500 #Model architecture class CIFAR10Model(nn.Module): def", "(i+1) # prediction[i] = output[i,:,:].data.max(1)[1] # first column has actual prob import pdb;", "= ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu) print(epoch, accuracy_epoch) #", "= nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv2_drop = nn.Dropout2d() self.conv3", "padding=0 ) self.conv6_drop = nn.Dropout2d() self.conv7 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv7_bn", "a very simple way). train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) # When iteration starts,", "descent optimizer optimizer = optim.RMSprop(model.parameters(), lr=0.0001) num_epochs = 1 model.train() train_loss = []", "nn.Linear(64 * 5 * 5, H) self.fc2 = nn.Linear(H, H) self.fc3 = nn.Linear(H,", "units and the output. output = model(data) #The objective function is the negative", "torch.load('model.ckpt') # In[ ]: # Download and construct CIFAR-10 dataset. 
test_dataset = torchvision.datasets.CIFAR10(root='./data/',", "in range(1,50): output[i,:,:] = output[i-1,:,:] + model(data).data for i in range(50): output[i,:,:] =", "1000 optimizer.step() #Calculate accuracy on the training set. prediction = output.data.max(1)[1] # first", "= torch.max(output[i,:,:],1) accuracy[i] = ( float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) test_accu = np.asarray(test_accu).reshape((10000/50,50))", "test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:] = model(data).data for i", "and labels. images, labels = data_iter.next() # # In[ ]: #Calculate accuracy of", "self.conv6_drop = nn.Dropout2d() self.conv7 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv7_bn = nn.BatchNorm2d(64)", "= F.max_pool2d(x, kernel_size=2,stride=2) x = self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x)) x", "in train_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates gradients\", so we need", "nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d() self.conv9 =", "# In[ ]: # Download and construct CIFAR-10 dataset. test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False,", "self.conv2 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv2_drop = nn.Dropout2d() self.conv3 = nn.Conv2d(64,", "using stochastic gradient descent. 
for group in optimizer.param_groups: for p in group['params']: state", "= self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x))) # x = self.conv9_drop(x) x = x.view(x.size(0), -1)", "test_accu = [] for images, labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda()", "# first column has actual prob import pdb; pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1) accuracy[i]", "forward(self, x): #Here is where the network is specified. x = F.relu(self.conv1_bn(self.conv1(x))) x", "the network is specified. x = F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x)) x = F.max_pool2d(x,", "nn.Dropout2d() self.conv3 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv3_bn = nn.BatchNorm2d(64) self.conv4 =", "as F import torch.optim as optim from torch.autograd import Variable # In[2]: batch_size", "import time import copy from random import randint import torch import torchvision import", "network is specified. x = F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x)) x = F.max_pool2d(x, kernel_size=2,stride=2)", "= optim.RMSprop(model.parameters(), lr=0.0001) num_epochs = 1 model.train() train_loss = [] # In[6]: #Train", "test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output = model(data) loss = F.nll_loss(output,", "optimizer.step() #Calculate accuracy on the training set. prediction = output.data.max(1)[1] # first column", "iter(train_loader) # Mini-batch images and labels. 
images, labels = data_iter.next() # In[3]: #", "labels in train_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates gradients\", so we", "def __init__(self): super(CIFAR10Model, self).__init__() # input is 3x32x32 #These variables store the model", "# In[6]: #Train Model for epoch in range(num_epochs): train_accu = [] for images,", "<reponame>bansalshubh91/Deep-CNN---CIFAR10<gh_stars>0 # coding: utf-8 # In[ ]: # # coding: utf-8 # #", "output[0,:,:] = model(data).data for i in range(1,50): output[i,:,:] = output[i-1,:,:] + model(data).data for", "hidden units H = 500 #Model architecture class CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model, self).__init__()", "load data from files. data_iter = iter(train_loader) # Mini-batch images and labels. images,", "# In[ ]: #Calculate accuracy of trained model on the Test Set model.eval()", "= 50 # Download and construct CIFAR-10 dataset. train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(),", "= F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) x = self.conv2_drop(x) x", "#The objective function is the negative log-likelihood function. 
loss = F.nll_loss(output, target) #This", "import copy from random import randint import torch import torchvision import torchvision.transforms as", "nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv5_bn = nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64, 64, kernel_size=3,stride=1,", "F.relu(self.conv8_bn(self.conv8(x))) # x = self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x))) # x = self.conv9_drop(x) x", "prediction = torch.zeros((50,1)) accuracy = torch.zeros((50,1)) test_accu = [] for images, labels in", "torchvision.transforms as transforms import torch.nn as nn import torch.nn.functional as F import torch.optim", "#The parameters for the model are updated using stochastic gradient descent. for group", "x = F.relu(self.conv6(x)) # x = self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x)))", "self.conv1_bn = nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv2_drop = nn.Dropout2d()", "accuracy_epoch) # # Save and load the entire model. # torch.save(model, 'model.ckpt') #", "images, labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output = model(data)", "the negative log-likelihood function. loss = F.nll_loss(output, target) #This calculates the gradients (via", "#These variables store the model parameters. self.conv1 = nn.Conv2d(3, 64, kernel_size=4, stride=1, padding=2", "Set model.eval() test_accu = [] for images, labels in test_loader: data, target =", "= [] for images, labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad()", "the model, i.e. calculate the hidden units and the output. 
output = model(data)", "self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d() self.conv9 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 )", "torch.autograd import Variable # In[2]: batch_size = 50 # Download and construct CIFAR-10", "= nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv6_drop = nn.Dropout2d() self.conv7", "[] for images, labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output", "nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d() self.fc1 = nn.Linear(64 * 5 * 5, H) self.fc2", "for p in group['params']: state = optimizer.state[p] if('step' in state and state['step']>=1024): state['step']", "np import h5py import time import copy from random import randint import torch", "F.nll_loss(output, target) prediction = output.data.max(1)[1] # first column has actual prob. accuracy =", "nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d() self.conv9 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv9_bn =", "When iteration starts, queue and thread start to load data from files. data_iter", "shuffle=False) # When iteration starts, queue and thread start to load data from", "= x.view(x.size(0), -1) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = self.fc3(x) return", "= F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x)) # x = self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x))) x", "construct CIFAR-10 dataset. train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) #", "= output[i,:,:] / (i+1) # prediction[i] = output[i,:,:].data.max(1)[1] # first column has actual", "column has actual prob. 
accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test", "model(data) loss = F.nll_loss(output, target) prediction = output.data.max(1)[1] # first column has actual", "model.eval() output = torch.zeros((50,50,10)) prediction = torch.zeros((50,1)) accuracy = torch.zeros((50,1)) test_accu = []", "model(data) #The objective function is the negative log-likelihood function. loss = F.nll_loss(output, target)", "data, target = Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target)", "data, target = Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:] = model(data).data for i in", "lr=0.0001) num_epochs = 1 model.train() train_loss = [] # In[6]: #Train Model for", "kernel_size=3,stride=1, padding=0 ) self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d() self.conv9 = nn.Conv2d(64, 64,", "50 # Download and construct CIFAR-10 dataset. train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(),", "= nn.Linear(64 * 5 * 5, H) self.fc2 = nn.Linear(H, H) self.fc3 =", ") /float(batch_size))*100.0 test_accu.append(accuracy) test_accu = np.asarray(test_accu).reshape((10000/50,50)) accuracy_test = np.mean(test_accu, axis = 0) print(accuracy_test)", "images and labels. 
images, labels = data_iter.next() # In[3]: # In[4]: #number of", "= torch.zeros((50,50,10)) prediction = torch.zeros((50,1)) accuracy = torch.zeros((50,1)) test_accu = [] for images,", "torch.zeros((50,1)) test_accu = [] for images, labels in test_loader: data, target = Variable(images).cuda(),", "Test Set # # model.eval() output = torch.zeros((50,50,10)) prediction = torch.zeros((50,1)) accuracy =", "optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) prediction = output.data.max(1)[1] # first", "= self.conv9_drop(x) x = x.view(x.size(0), -1) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x", "self.conv7_bn = nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv8_bn = nn.BatchNorm2d(64)", "and construct CIFAR-10 dataset. train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False)", "files. data_iter = iter(test_loader) # Mini-batch images and labels. images, labels = data_iter.next()", "self.conv5 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv5_bn = nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64,", "is specified. x = F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) x", "= [] # In[6]: #Train Model for epoch in range(num_epochs): train_accu = []", "model are updated using stochastic gradient descent. 
for group in optimizer.param_groups: for p", "F.relu(self.conv9_bn(self.conv9(x))) # x = self.conv9_drop(x) x = x.view(x.size(0), -1) x = F.relu(self.fc1(x)) x", "model(data).data for i in range(1,50): output[i,:,:] = output[i-1,:,:] + model(data).data for i in", "import Variable # In[2]: batch_size = 50 # Download and construct CIFAR-10 dataset.", "self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x)) # x = self.conv6_drop(x) x =", "model = CIFAR10Model() model.cuda() # In[5]: #Stochastic gradient descent optimizer optimizer = optim.RMSprop(model.parameters(),", "= F.relu(self.conv2(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) x = self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x))) x", "output = torch.zeros((50,50,10)) prediction = torch.zeros((50,1)) accuracy = torch.zeros((50,1)) test_accu = [] for", "import numpy as np import h5py import time import copy from random import", "= F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = self.fc3(x) return F.log_softmax(x, dim=1) model =", "kernel_size=4, stride=1, padding=2 ) self.conv1_bn = nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2", "transforms.ToTensor()]), download=False) # Data loader (this provides queues and threads in a very", "self).__init__() # input is 3x32x32 #These variables store the model parameters. self.conv1 =", "and construct CIFAR-10 dataset. 
test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False) # Data loader", "x = F.relu(self.conv4(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) # x = self.conv4_drop(x) x =", "= torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) # When iteration starts, queue and thread start to", "= self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x)) # x = self.conv6_drop(x) x", "# prediction[i] = output[i,:,:].data.max(1)[1] # first column has actual prob import pdb; pdb.set_trace()", "as np import h5py import time import copy from random import randint import", "F.relu(self.fc2(x)) x = self.fc3(x) return F.log_softmax(x, dim=1) model = CIFAR10Model() model.cuda() # In[5]:", "# Download and construct CIFAR-10 dataset. train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4),", "# model = torch.load('model.ckpt') # In[ ]: # Download and construct CIFAR-10 dataset.", "nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv6_drop = nn.Dropout2d() self.conv7 =", "and threads in a very simple way). test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) #", "/float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test = np.mean(test_accu) print(accuracy_test) # # In[51]: # #Calculate accuracy of", "self.fc3 = nn.Linear(H, 10) def forward(self, x): #Here is where the network is", "thread start to load data from files. 
data_iter = iter(test_loader) # Mini-batch images", "need to set the stored gradients to zero when there’s a new batch", "float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) test_accu = np.asarray(test_accu).reshape((10000/50,50)) accuracy_test = np.mean(test_accu, axis =", "model parameters. self.conv1 = nn.Conv2d(3, 64, kernel_size=4, stride=1, padding=2 ) self.conv1_bn = nn.BatchNorm2d(64)", "kernel_size=3,stride=1, padding=0 ) self.conv6_drop = nn.Dropout2d() self.conv7 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 )", "nn.Conv2d(3, 64, kernel_size=4, stride=1, padding=2 ) self.conv1_bn = nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64, 64,", "-1) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = self.fc3(x) return F.log_softmax(x, dim=1)", "queue and thread start to load data from files. data_iter = iter(train_loader) #", "H = 500 #Model architecture class CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model, self).__init__() # input", "x = self.fc3(x) return F.log_softmax(x, dim=1) model = CIFAR10Model() model.cuda() # In[5]: #Stochastic", "( float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) test_accu = np.asarray(test_accu).reshape((10000/50,50)) accuracy_test = np.mean(test_accu, axis", "In[5]: #Stochastic gradient descent optimizer optimizer = optim.RMSprop(model.parameters(), lr=0.0001) num_epochs = 1 model.train()", "F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x))) # x = self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x))) # x", "and thread start to load data from files. data_iter = iter(test_loader) # Mini-batch", "# In[2]: batch_size = 50 # Download and construct CIFAR-10 dataset. train_dataset =", "threads in a very simple way). 
test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) # When", "of hidden units H = 500 #Model architecture class CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model,", "class CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model, self).__init__() # input is 3x32x32 #These variables store", "Set # # model.eval() output = torch.zeros((50,50,10)) prediction = torch.zeros((50,1)) accuracy = torch.zeros((50,1))", "kernel_size=4,stride=1, padding=2 ) self.conv3_bn = nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 )", "images, labels = data_iter.next() # In[3]: # In[4]: #number of hidden units H", "= nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d() self.fc1", "= 1000 optimizer.step() #Calculate accuracy on the training set. prediction = output.data.max(1)[1] #", "F.relu(self.conv2(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) x = self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x))) x =", "= F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) # x = self.conv4_drop(x)", "state = optimizer.state[p] if('step' in state and state['step']>=1024): state['step'] = 1000 optimizer.step() #Calculate", "optimizer.zero_grad() #Forward propagation of the model, i.e. calculate the hidden units and the", "set. prediction = output.data.max(1)[1] # first column has actual prob. accuracy = (", "utf-8 # In[ ]: # # coding: utf-8 # # In[1]: import numpy", "function. loss = F.nll_loss(output, target) #This calculates the gradients (via backpropagation) loss.backward() train_loss.append(loss.data[0])", "starts, queue and thread start to load data from files. data_iter = iter(test_loader)", "batch of data. optimizer.zero_grad() #Forward propagation of the model, i.e. 
calculate the hidden", "test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False) # Data loader (this provides queues and", "64, kernel_size=3,stride=1, padding=0 ) self.conv6_drop = nn.Dropout2d() self.conv7 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0", "64, kernel_size=3,stride=1, padding=0 ) self.conv7_bn = nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0", "nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv4_drop = nn.Dropout2d() self.conv5 =", "for epoch in range(num_epochs): train_accu = [] for images, labels in train_loader: data,", "]: # Download and construct CIFAR-10 dataset. test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False)", "has actual prob. accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test =", "data from files. data_iter = iter(test_loader) # Mini-batch images and labels. images, labels", "= output.data.max(1)[1] # first column has actual prob. accuracy = ( float( prediction.eq(target.data).sum()", "for group in optimizer.param_groups: for p in group['params']: state = optimizer.state[p] if('step' in", "10) def forward(self, x): #Here is where the network is specified. x =", "def forward(self, x): #Here is where the network is specified. x = F.relu(self.conv1_bn(self.conv1(x)))", "import torch.optim as optim from torch.autograd import Variable # In[2]: batch_size = 50", "padding=2 ) self.conv5_bn = nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv6_drop", "self.conv5_bn = nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv6_drop = nn.Dropout2d()", "In[6]: #Train Model for epoch in range(num_epochs): train_accu = [] for images, labels", "to load data from files. 
data_iter = iter(test_loader) # Mini-batch images and labels.", "group in optimizer.param_groups: for p in group['params']: state = optimizer.state[p] if('step' in state", "= torch.load('model.ckpt') # In[ ]: # Download and construct CIFAR-10 dataset. test_dataset =", "= self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) #", "optim.RMSprop(model.parameters(), lr=0.0001) num_epochs = 1 model.train() train_loss = [] # In[6]: #Train Model", "Variable(labels).cuda() #PyTorch \"accumulates gradients\", so we need to set the stored gradients to", "model.eval() test_accu = [] for images, labels in test_loader: data, target = Variable(images).cuda(),", "# # In[51]: # #Calculate accuracy of trained model on the Test Set", "and threads in a very simple way). train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) #", "target = Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates gradients\", so we need to set the", "optimizer.state[p] if('step' in state and state['step']>=1024): state['step'] = 1000 optimizer.step() #Calculate accuracy on", "# x = self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x)) # x =", "= [] for images, labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() #", "x = self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x)) x = F.max_pool2d(x, kernel_size=2,stride=2)", "In[3]: # In[4]: #number of hidden units H = 500 #Model architecture class", "super(CIFAR10Model, self).__init__() # input is 3x32x32 #These variables store the model parameters. self.conv1", "state['step'] = 1000 optimizer.step() #Calculate accuracy on the training set. 
prediction = output.data.max(1)[1]", "x = F.relu(self.conv2(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) x = self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x)))", "(this provides queues and threads in a very simple way). train_loader = torch.utils.data.DataLoader(dataset=train_dataset,", "In[ ]: # # coding: utf-8 # # In[1]: import numpy as np", "train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) # Data loader (this", "backpropagation) loss.backward() train_loss.append(loss.data[0]) #The parameters for the model are updated using stochastic gradient", "self.conv3_bn = nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv4_drop = nn.Dropout2d()", "train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu) print(epoch, accuracy_epoch) # # Save and load the entire", "test_accu.append(accuracy) accuracy_test = np.mean(test_accu) print(accuracy_test) # # In[51]: # #Calculate accuracy of trained", "F.relu(self.conv6(x)) # x = self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x))) # x", "model, i.e. calculate the hidden units and the output. output = model(data) #The", "transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) # Data loader (this provides queues and threads", "images, labels in train_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates gradients\", so", "very simple way). train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) # When iteration starts, queue", "training set. prediction = output.data.max(1)[1] # first column has actual prob. 
accuracy =", "= np.mean(test_accu) print(accuracy_test) # # In[51]: # #Calculate accuracy of trained model on", "# In[51]: # #Calculate accuracy of trained model on the Test Set #", "= self.fc3(x) return F.log_softmax(x, dim=1) model = CIFAR10Model() model.cuda() # In[5]: #Stochastic gradient", "x = F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x)) # x = self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x)))", "nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv3_bn = nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64, 64, kernel_size=4,stride=1,", "the entire model. # torch.save(model, 'model.ckpt') # model = torch.load('model.ckpt') # In[ ]:", "torchvision import torchvision.transforms as transforms import torch.nn as nn import torch.nn.functional as F", "* 5, H) self.fc2 = nn.Linear(H, H) self.fc3 = nn.Linear(H, 10) def forward(self,", "5, H) self.fc2 = nn.Linear(H, H) self.fc3 = nn.Linear(H, 10) def forward(self, x):", "(via backpropagation) loss.backward() train_loss.append(loss.data[0]) #The parameters for the model are updated using stochastic", "64, kernel_size=4,stride=1, padding=2 ) self.conv5_bn = nn.BatchNorm2d(64) self.conv6 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0", ") self.conv6_drop = nn.Dropout2d() self.conv7 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv7_bn =", "simple way). train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) # When iteration starts, queue and", "# # model.eval() output = torch.zeros((50,50,10)) prediction = torch.zeros((50,1)) accuracy = torch.zeros((50,1)) test_accu", "= nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv4_drop = nn.Dropout2d() self.conv5 = nn.Conv2d(64, 64,", "thread start to load data from files. 
data_iter = iter(train_loader) # Mini-batch images", "padding=0 ) self.conv7_bn = nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv8_bn", "if('step' in state and state['step']>=1024): state['step'] = 1000 optimizer.step() #Calculate accuracy on the", "Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) prediction = output.data.max(1)[1]", "train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) # When iteration starts, queue and thread start", "prob. accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu) print(epoch,", "to set the stored gradients to zero when there’s a new batch of", "= iter(test_loader) # Mini-batch images and labels. images, labels = data_iter.next() # #", "hidden units and the output. output = model(data) #The objective function is the", "column has actual prob. accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch", "float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu) print(epoch, accuracy_epoch) # # Save", "for images, labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:]", "starts, queue and thread start to load data from files. data_iter = iter(train_loader)", "# In[ ]: # # coding: utf-8 # # In[1]: import numpy as", "is the negative log-likelihood function. loss = F.nll_loss(output, target) #This calculates the gradients", "from files. data_iter = iter(test_loader) # Mini-batch images and labels. images, labels =", "threads in a very simple way). 
train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) # When", "the stored gradients to zero when there’s a new batch of data. optimizer.zero_grad()", "= ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test = np.mean(test_accu) print(accuracy_test) # #", "numpy as np import h5py import time import copy from random import randint", "# optimizer.zero_grad() output[0,:,:] = model(data).data for i in range(1,50): output[i,:,:] = output[i-1,:,:] +", "'model.ckpt') # model = torch.load('model.ckpt') # In[ ]: # Download and construct CIFAR-10", "#Calculate accuracy of trained model on the Test Set model.eval() test_accu = []", "Mini-batch images and labels. images, labels = data_iter.next() # # In[ ]: #Calculate", "F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) # x = self.conv4_drop(x) x", "F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = self.fc3(x) return F.log_softmax(x, dim=1) model = CIFAR10Model()", "self.fc3(x) return F.log_softmax(x, dim=1) model = CIFAR10Model() model.cuda() # In[5]: #Stochastic gradient descent", "x = F.relu(self.fc2(x)) x = self.fc3(x) return F.log_softmax(x, dim=1) model = CIFAR10Model() model.cuda()", "#Calculate accuracy on the training set. prediction = output.data.max(1)[1] # first column has", "a new batch of data. optimizer.zero_grad() #Forward propagation of the model, i.e. 
calculate", "import pdb; pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1) accuracy[i] = ( float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0", "H) self.fc3 = nn.Linear(H, 10) def forward(self, x): #Here is where the network", "shuffle=True) # When iteration starts, queue and thread start to load data from", "train_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates gradients\", so we need to", "data, target = Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates gradients\", so we need to set", "the output. output = model(data) #The objective function is the negative log-likelihood function.", "i in range(1,50): output[i,:,:] = output[i-1,:,:] + model(data).data for i in range(50): output[i,:,:]", "stochastic gradient descent. for group in optimizer.param_groups: for p in group['params']: state =", "# # In[1]: import numpy as np import h5py import time import copy", "# model.eval() output = torch.zeros((50,50,10)) prediction = torch.zeros((50,1)) accuracy = torch.zeros((50,1)) test_accu =", "negative log-likelihood function. loss = F.nll_loss(output, target) #This calculates the gradients (via backpropagation)", "and thread start to load data from files. data_iter = iter(train_loader) # Mini-batch", "= torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) # Data loader (this provides", "optimizer.zero_grad() output[0,:,:] = model(data).data for i in range(1,50): output[i,:,:] = output[i-1,:,:] + model(data).data", "new batch of data. optimizer.zero_grad() #Forward propagation of the model, i.e. calculate the", "coding: utf-8 # # In[1]: import numpy as np import h5py import time", "load the entire model. 
# torch.save(model, 'model.ckpt') # model = torch.load('model.ckpt') # In[", "the Test Set model.eval() test_accu = [] for images, labels in test_loader: data,", "for i in range(50): output[i,:,:] = output[i,:,:] / (i+1) # prediction[i] = output[i,:,:].data.max(1)[1]", "data_iter = iter(train_loader) # Mini-batch images and labels. images, labels = data_iter.next() #", "self.conv2_drop = nn.Dropout2d() self.conv3 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv3_bn = nn.BatchNorm2d(64)", "500 #Model architecture class CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model, self).__init__() # input is 3x32x32", "set the stored gradients to zero when there’s a new batch of data.", "log-likelihood function. loss = F.nll_loss(output, target) #This calculates the gradients (via backpropagation) loss.backward()", "= optimizer.state[p] if('step' in state and state['step']>=1024): state['step'] = 1000 optimizer.step() #Calculate accuracy", "when there’s a new batch of data. optimizer.zero_grad() #Forward propagation of the model,", "/float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu) print(epoch, accuracy_epoch) # # Save and load the", "self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x))) # x = self.conv9_drop(x) x = x.view(x.size(0), -1) x", "pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1) accuracy[i] = ( float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) test_accu", "data_iter.next() # # In[ ]: #Calculate accuracy of trained model on the Test", "= nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d() self.conv9", "images, labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:] =", "dataset. 
train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) # Data loader", "self.conv4 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv4_drop = nn.Dropout2d() self.conv5 = nn.Conv2d(64,", "F.relu(self.conv4(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) # x = self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x))) x", "model. # torch.save(model, 'model.ckpt') # model = torch.load('model.ckpt') # In[ ]: # Download", "on the Test Set # # model.eval() output = torch.zeros((50,50,10)) prediction = torch.zeros((50,1))", "prediction = output.data.max(1)[1] # first column has actual prob. accuracy = ( float(", "import torch.nn.functional as F import torch.optim as optim from torch.autograd import Variable #", "i.e. calculate the hidden units and the output. output = model(data) #The objective", "3x32x32 #These variables store the model parameters. self.conv1 = nn.Conv2d(3, 64, kernel_size=4, stride=1,", "the model parameters. self.conv1 = nn.Conv2d(3, 64, kernel_size=4, stride=1, padding=2 ) self.conv1_bn =", "x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = self.fc3(x) return F.log_softmax(x, dim=1) model", "iteration starts, queue and thread start to load data from files. data_iter =", "= 500 #Model architecture class CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model, self).__init__() # input is", "]: #Calculate accuracy of trained model on the Test Set model.eval() test_accu =", "accuracy of trained model on the Test Set model.eval() test_accu = [] for", "In[2]: batch_size = 50 # Download and construct CIFAR-10 dataset. 
train_dataset = torchvision.datasets.CIFAR10(root='./data/',", "= nn.Dropout2d() self.conv3 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv3_bn = nn.BatchNorm2d(64) self.conv4", "zero when there’s a new batch of data. optimizer.zero_grad() #Forward propagation of the", "and the output. output = model(data) #The objective function is the negative log-likelihood", "padding=0 ) self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d() self.conv9 = nn.Conv2d(64, 64, kernel_size=4,stride=1,", "nn import torch.nn.functional as F import torch.optim as optim from torch.autograd import Variable", "we need to set the stored gradients to zero when there’s a new", "= model(data) loss = F.nll_loss(output, target) prediction = output.data.max(1)[1] # first column has", "in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output = model(data) loss =", "nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv7_bn = nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64, 64, kernel_size=3,stride=1,", "accuracy_epoch = np.mean(train_accu) print(epoch, accuracy_epoch) # # Save and load the entire model.", "first column has actual prob. accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy)", "and state['step']>=1024): state['step'] = 1000 optimizer.step() #Calculate accuracy on the training set. prediction", "units H = 500 #Model architecture class CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model, self).__init__() #", "\"accumulates gradients\", so we need to set the stored gradients to zero when", "# first column has actual prob. 
accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0", "self.conv4_drop = nn.Dropout2d() self.conv5 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv5_bn = nn.BatchNorm2d(64)", "# torch.save(model, 'model.ckpt') # model = torch.load('model.ckpt') # In[ ]: # Download and", "for images, labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() optimizer.zero_grad() output =", "# In[5]: #Stochastic gradient descent optimizer optimizer = optim.RMSprop(model.parameters(), lr=0.0001) num_epochs = 1", "and load the entire model. # torch.save(model, 'model.ckpt') # model = torch.load('model.ckpt') #", "self.conv1 = nn.Conv2d(3, 64, kernel_size=4, stride=1, padding=2 ) self.conv1_bn = nn.BatchNorm2d(64) self.conv2 =", ") /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test = np.mean(test_accu) print(accuracy_test) # # In[51]: # #Calculate accuracy", "= F.nll_loss(output, target) #This calculates the gradients (via backpropagation) loss.backward() train_loss.append(loss.data[0]) #The parameters", "batch_size=batch_size, shuffle=False) # When iteration starts, queue and thread start to load data", "load data from files. data_iter = iter(test_loader) # Mini-batch images and labels. 
images,", "self.conv3 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv3_bn = nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64,", "= nn.Dropout2d() self.conv9 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop", "prediction[i] = output[i,:,:].data.max(1)[1] # first column has actual prob import pdb; pdb.set_trace() prediction[i]", "group['params']: state = optimizer.state[p] if('step' in state and state['step']>=1024): state['step'] = 1000 optimizer.step()", "x = F.max_pool2d(x, kernel_size=2,stride=2) x = self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x))", "on the Test Set model.eval() test_accu = [] for images, labels in test_loader:", "calculate the hidden units and the output. output = model(data) #The objective function", "# # In[ ]: #Calculate accuracy of trained model on the Test Set", "torch.nn.functional as F import torch.optim as optim from torch.autograd import Variable # In[2]:", "= nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv6_drop = nn.Dropout2d() self.conv7 = nn.Conv2d(64, 64,", "provides queues and threads in a very simple way). train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size,", "accuracy on the training set. prediction = output.data.max(1)[1] # first column has actual", "utf-8 # # In[1]: import numpy as np import h5py import time import", "# Save and load the entire model. 
# torch.save(model, 'model.ckpt') # model =", "in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:] = model(data).data for", "loss = F.nll_loss(output, target) prediction = output.data.max(1)[1] # first column has actual prob.", "F.max_pool2d(x, kernel_size=2,stride=2) x = self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x)) x =", "64, kernel_size=4,stride=1, padding=2 ) self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d() self.fc1 = nn.Linear(64", "labels. images, labels = data_iter.next() # # In[ ]: #Calculate accuracy of trained", "model on the Test Set # # model.eval() output = torch.zeros((50,50,10)) prediction =", "return F.log_softmax(x, dim=1) model = CIFAR10Model() model.cuda() # In[5]: #Stochastic gradient descent optimizer", "from torch.autograd import Variable # In[2]: batch_size = 50 # Download and construct", ") self.conv7_bn = nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv8_bn =", "column has actual prob import pdb; pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1) accuracy[i] = (", "train_accu = [] for images, labels in train_loader: data, target = Variable(images).cuda(), Variable(labels).cuda()", "= nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop", "= nn.Linear(H, H) self.fc3 = nn.Linear(H, 10) def forward(self, x): #Here is where", "x): #Here is where the network is specified. 
x = F.relu(self.conv1_bn(self.conv1(x))) x =", "* 5 * 5, H) self.fc2 = nn.Linear(H, H) self.fc3 = nn.Linear(H, 10)", "= data_iter.next() # In[3]: # In[4]: #number of hidden units H = 500", "#This calculates the gradients (via backpropagation) loss.backward() train_loss.append(loss.data[0]) #The parameters for the model", "kernel_size=3,stride=1, padding=0 ) self.conv7_bn = nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 )", ") self.conv1_bn = nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv2_drop =", "gradients\", so we need to set the stored gradients to zero when there’s", "#Forward propagation of the model, i.e. calculate the hidden units and the output.", "train_loss = [] # In[6]: #Train Model for epoch in range(num_epochs): train_accu =", "F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x)) # x = self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x))) x =", "queue and thread start to load data from files. data_iter = iter(test_loader) #", "in state and state['step']>=1024): state['step'] = 1000 optimizer.step() #Calculate accuracy on the training", "#PyTorch \"accumulates gradients\", so we need to set the stored gradients to zero", "provides queues and threads in a very simple way). test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size,", "nn.Linear(H, H) self.fc3 = nn.Linear(H, 10) def forward(self, x): #Here is where the", "dataset. test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False) # Data loader (this provides queues", "start to load data from files. 
data_iter = iter(train_loader) # Mini-batch images and", "for images, labels in train_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates gradients\",", "transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) # Data loader (this provides queues and threads in", "labels = data_iter.next() # In[3]: # In[4]: #number of hidden units H =", "train_loss.append(loss.data[0]) #The parameters for the model are updated using stochastic gradient descent. for", "very simple way). test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) # When iteration starts, queue", "= Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates gradients\", so we need to set the stored", "torch.max(output[i,:,:],1) accuracy[i] = ( float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) test_accu = np.asarray(test_accu).reshape((10000/50,50)) accuracy_test", "padding=2 ) self.conv4_drop = nn.Dropout2d() self.conv5 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv5_bn", "self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) # x", "= nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv7_bn = nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64, 64,", "architecture class CIFAR10Model(nn.Module): def __init__(self): super(CIFAR10Model, self).__init__() # input is 3x32x32 #These variables", "torch import torchvision import torchvision.transforms as transforms import torch.nn as nn import torch.nn.functional", "padding=2 ) self.conv3_bn = nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv4_drop", "updated using stochastic gradient descent. for group in optimizer.param_groups: for p in group['params']:", "there’s a new batch of data. 
optimizer.zero_grad() #Forward propagation of the model, i.e.", "np.mean(train_accu) print(epoch, accuracy_epoch) # # Save and load the entire model. # torch.save(model,", "Test Set model.eval() test_accu = [] for images, labels in test_loader: data, target", "output[i,:,:] / (i+1) # prediction[i] = output[i,:,:].data.max(1)[1] # first column has actual prob", "= 1 model.train() train_loss = [] # In[6]: #Train Model for epoch in", "= nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv3_bn = nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64, 64,", "prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) test_accu = np.asarray(test_accu).reshape((10000/50,50)) accuracy_test = np.mean(test_accu, axis = 0)", "as nn import torch.nn.functional as F import torch.optim as optim from torch.autograd import", "download=False) # Data loader (this provides queues and threads in a very simple", "target = Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad() output[0,:,:] = model(data).data for i in range(1,50):", "specified. x = F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) x =", "]: # # coding: utf-8 # # In[1]: import numpy as np import", "#Stochastic gradient descent optimizer optimizer = optim.RMSprop(model.parameters(), lr=0.0001) num_epochs = 1 model.train() train_loss", "= torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False) # Data loader (this provides queues and threads", "= np.mean(train_accu) print(epoch, accuracy_epoch) # # Save and load the entire model. #", "# # Save and load the entire model. # torch.save(model, 'model.ckpt') # model", "accuracy_test = np.mean(test_accu) print(accuracy_test) # # In[51]: # #Calculate accuracy of trained model", "store the model parameters. 
self.conv1 = nn.Conv2d(3, 64, kernel_size=4, stride=1, padding=2 ) self.conv1_bn", "state['step']>=1024): state['step'] = 1000 optimizer.step() #Calculate accuracy on the training set. prediction =", "CIFAR-10 dataset. test_dataset = torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False) # Data loader (this provides", "is 3x32x32 #These variables store the model parameters. self.conv1 = nn.Conv2d(3, 64, kernel_size=4,", "self.fc1 = nn.Linear(64 * 5 * 5, H) self.fc2 = nn.Linear(H, H) self.fc3", "( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu) print(epoch, accuracy_epoch) # #", "torch.save(model, 'model.ckpt') # model = torch.load('model.ckpt') # In[ ]: # Download and construct", "in a very simple way). test_loader = torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) # When iteration", "import h5py import time import copy from random import randint import torch import", "nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d() self.fc1 =", "= output[i-1,:,:] + model(data).data for i in range(50): output[i,:,:] = output[i,:,:] / (i+1)", "output[i,:,:].data.max(1)[1] # first column has actual prob import pdb; pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1)", "model.train() train_loss = [] # In[6]: #Train Model for epoch in range(num_epochs): train_accu", "import torch.nn as nn import torch.nn.functional as F import torch.optim as optim from", "nn.BatchNorm2d(64) self.conv8 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop =", "Download and construct CIFAR-10 dataset. 
train_dataset = torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]),", "x = F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x))) # x = self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x)))", "= torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) # When iteration starts, queue and thread start to", "gradients to zero when there’s a new batch of data. optimizer.zero_grad() #Forward propagation", ") self.conv4_drop = nn.Dropout2d() self.conv5 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv5_bn =", "x = F.relu(self.conv8_bn(self.conv8(x))) # x = self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x))) # x =", "parameters for the model are updated using stochastic gradient descent. for group in", ") /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu) print(epoch, accuracy_epoch) # # Save and load", "torchvision.datasets.CIFAR10(root='./data/', train=False, transform=transforms.ToTensor(), download=False) # Data loader (this provides queues and threads in", "F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x)) x = F.max_pool2d(x, kernel_size=2,stride=2) x = self.conv2_drop(x) x =", "( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test = np.mean(test_accu) print(accuracy_test) # # In[51]:", "import randint import torch import torchvision import torchvision.transforms as transforms import torch.nn as", "from files. data_iter = iter(train_loader) # Mini-batch images and labels. images, labels =", "Data loader (this provides queues and threads in a very simple way). 
test_loader", "pdb; pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1) accuracy[i] = ( float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy)", "num_epochs = 1 model.train() train_loss = [] # In[6]: #Train Model for epoch", "#Here is where the network is specified. x = F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x))", "kernel_size=2,stride=2) # x = self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x)) # x", "descent. for group in optimizer.param_groups: for p in group['params']: state = optimizer.state[p] if('step'", "self.conv8_drop = nn.Dropout2d() self.conv9 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv9_bn = nn.BatchNorm2d(64)", "# In[4]: #number of hidden units H = 500 #Model architecture class CIFAR10Model(nn.Module):", "1 model.train() train_loss = [] # In[6]: #Train Model for epoch in range(num_epochs):", "= F.relu(self.conv9_bn(self.conv9(x))) # x = self.conv9_drop(x) x = x.view(x.size(0), -1) x = F.relu(self.fc1(x))", "torchvision.datasets.CIFAR10(root='./data/', train=True, transform=transforms.Compose([transforms.RandomHorizontalFlip(), transforms.RandomVerticalFlip(), transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) # Data loader (this provides queues", "= torch.zeros((50,1)) test_accu = [] for images, labels in test_loader: data, target =", "Variable(images).cuda(), Variable(labels).cuda() #PyTorch \"accumulates gradients\", so we need to set the stored gradients", "torch.nn as nn import torch.nn.functional as F import torch.optim as optim from torch.autograd", "import torchvision import torchvision.transforms as transforms import torch.nn as nn import torch.nn.functional as", "files. data_iter = iter(train_loader) # Mini-batch images and labels. 
images, labels = data_iter.next()", "transforms.ColorJitter(brightness=0.4), transforms.ToTensor()]), download=False) # Data loader (this provides queues and threads in a", "# input is 3x32x32 #These variables store the model parameters. self.conv1 = nn.Conv2d(3,", "#Train Model for epoch in range(num_epochs): train_accu = [] for images, labels in", "data. optimizer.zero_grad() #Forward propagation of the model, i.e. calculate the hidden units and", "the Test Set # # model.eval() output = torch.zeros((50,50,10)) prediction = torch.zeros((50,1)) accuracy", "x = x.view(x.size(0), -1) x = F.relu(self.fc1(x)) x = F.relu(self.fc2(x)) x = self.fc3(x)", "# x = self.conv8_drop(x) x = F.relu(self.conv9_bn(self.conv9(x))) # x = self.conv9_drop(x) x =", "the training set. prediction = output.data.max(1)[1] # first column has actual prob. accuracy", "# x = self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x))) # x =", "output = model(data) loss = F.nll_loss(output, target) prediction = output.data.max(1)[1] # first column", "data from files. data_iter = iter(train_loader) # Mini-batch images and labels. images, labels", "kernel_size=4,stride=1, padding=2 ) self.conv2_drop = nn.Dropout2d() self.conv3 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 )", "copy from random import randint import torch import torchvision import torchvision.transforms as transforms", "loader (this provides queues and threads in a very simple way). test_loader =", "kernel_size=2,stride=2) x = self.conv2_drop(x) x = F.relu(self.conv3_bn(self.conv3(x))) x = F.relu(self.conv4(x)) x = F.max_pool2d(x,", "epoch in range(num_epochs): train_accu = [] for images, labels in train_loader: data, target", "of data. optimizer.zero_grad() #Forward propagation of the model, i.e. 
calculate the hidden units", "= torch.zeros((50,1)) accuracy = torch.zeros((50,1)) test_accu = [] for images, labels in test_loader:", "= nn.Dropout2d() self.conv7 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv7_bn = nn.BatchNorm2d(64) self.conv8", "in a very simple way). train_loader = torch.utils.data.DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True) # When iteration", "time import copy from random import randint import torch import torchvision import torchvision.transforms", "first column has actual prob import pdb; pdb.set_trace() prediction[i] = torch.max(output[i,:,:],1) accuracy[i] =", "first column has actual prob. accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy)", "= nn.Dropout2d() self.conv5 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv5_bn = nn.BatchNorm2d(64) self.conv6", "nn.Linear(H, 10) def forward(self, x): #Here is where the network is specified. x", "= model(data).data for i in range(1,50): output[i,:,:] = output[i-1,:,:] + model(data).data for i", "F import torch.optim as optim from torch.autograd import Variable # In[2]: batch_size =", "has actual prob. accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch =", "iter(test_loader) # Mini-batch images and labels. 
images, labels = data_iter.next() # # In[", "= self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x))) # x = self.conv8_drop(x) x", "labels = data_iter.next() # # In[ ]: #Calculate accuracy of trained model on", "= data_iter.next() # # In[ ]: #Calculate accuracy of trained model on the", "64, kernel_size=4,stride=1, padding=2 ) self.conv4_drop = nn.Dropout2d() self.conv5 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2", "nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv4_drop = nn.Dropout2d() self.conv5 = nn.Conv2d(64, 64, kernel_size=4,stride=1,", "random import randint import torch import torchvision import torchvision.transforms as transforms import torch.nn", "optimizer.param_groups: for p in group['params']: state = optimizer.state[p] if('step' in state and state['step']>=1024):", "output. output = model(data) #The objective function is the negative log-likelihood function. loss", "= F.max_pool2d(x, kernel_size=2,stride=2) # x = self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x))", "= model(data) #The objective function is the negative log-likelihood function. loss = F.nll_loss(output,", "optim from torch.autograd import Variable # In[2]: batch_size = 50 # Download and", "for the model are updated using stochastic gradient descent. for group in optimizer.param_groups:", "[] for images, labels in test_loader: data, target = Variable(images).cuda(), Variable(labels).cuda() # optimizer.zero_grad()", "prediction.eq(target.data).sum() ) /float(batch_size))*100.0 train_accu.append(accuracy) accuracy_epoch = np.mean(train_accu) print(epoch, accuracy_epoch) # # Save and", "Mini-batch images and labels. 
images, labels = data_iter.next() # In[3]: # In[4]: #number", "padding=2 ) self.conv2_drop = nn.Dropout2d() self.conv3 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv3_bn", ") self.conv9_bn = nn.BatchNorm2d(64) self.conv9_drop = nn.Dropout2d() self.fc1 = nn.Linear(64 * 5 *", "where the network is specified. x = F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x)) x =", "of the model, i.e. calculate the hidden units and the output. output =", "the hidden units and the output. output = model(data) #The objective function is", "accuracy[i] = ( float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) test_accu = np.asarray(test_accu).reshape((10000/50,50)) accuracy_test =", "i in range(50): output[i,:,:] = output[i,:,:] / (i+1) # prediction[i] = output[i,:,:].data.max(1)[1] #", "torch.optim as optim from torch.autograd import Variable # In[2]: batch_size = 50 #", "is where the network is specified. x = F.relu(self.conv1_bn(self.conv1(x))) x = F.relu(self.conv2(x)) x", "= F.relu(self.conv6(x)) # x = self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x))) #", "5 * 5, H) self.fc2 = nn.Linear(H, H) self.fc3 = nn.Linear(H, 10) def", "data_iter = iter(test_loader) # Mini-batch images and labels. 
images, labels = data_iter.next() #", "x = self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x))) # x = self.conv8_drop(x)", "model.cuda() # In[5]: #Stochastic gradient descent optimizer optimizer = optim.RMSprop(model.parameters(), lr=0.0001) num_epochs =", "x = F.relu(self.conv9_bn(self.conv9(x))) # x = self.conv9_drop(x) x = x.view(x.size(0), -1) x =", "#Calculate accuracy of trained model on the Test Set # # model.eval() output", "optimizer = optim.RMSprop(model.parameters(), lr=0.0001) num_epochs = 1 model.train() train_loss = [] # In[6]:", "trained model on the Test Set model.eval() test_accu = [] for images, labels", "accuracy = torch.zeros((50,1)) test_accu = [] for images, labels in test_loader: data, target", "CIFAR10Model() model.cuda() # In[5]: #Stochastic gradient descent optimizer optimizer = optim.RMSprop(model.parameters(), lr=0.0001) num_epochs", "# Mini-batch images and labels. images, labels = data_iter.next() # In[3]: # In[4]:", "= nn.BatchNorm2d(64) self.conv4 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv4_drop = nn.Dropout2d() self.conv5", "= nn.Conv2d(3, 64, kernel_size=4, stride=1, padding=2 ) self.conv1_bn = nn.BatchNorm2d(64) self.conv2 = nn.Conv2d(64,", "self.conv6 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv6_drop = nn.Dropout2d() self.conv7 = nn.Conv2d(64,", "Save and load the entire model. # torch.save(model, 'model.ckpt') # model = torch.load('model.ckpt')", "optimizer optimizer = optim.RMSprop(model.parameters(), lr=0.0001) num_epochs = 1 model.train() train_loss = [] #", "torch.utils.data.DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False) # When iteration starts, queue and thread start to load", "self.conv6_drop(x) x = F.relu(self.conv7_bn(self.conv7(x))) x = F.relu(self.conv8_bn(self.conv8(x))) # x = self.conv8_drop(x) x =", "parameters. 
self.conv1 = nn.Conv2d(3, 64, kernel_size=4, stride=1, padding=2 ) self.conv1_bn = nn.BatchNorm2d(64) self.conv2", "self.conv8 = nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv8_bn = nn.BatchNorm2d(64) self.conv8_drop = nn.Dropout2d()", "= nn.Linear(H, 10) def forward(self, x): #Here is where the network is specified.", "prediction[i] = torch.max(output[i,:,:],1) accuracy[i] = ( float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) test_accu =", "nn.Conv2d(64, 64, kernel_size=3,stride=1, padding=0 ) self.conv6_drop = nn.Dropout2d() self.conv7 = nn.Conv2d(64, 64, kernel_size=3,stride=1,", "x = self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x))) x = F.relu(self.conv6(x)) # x = self.conv6_drop(x)", "float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test = np.mean(test_accu) print(accuracy_test) # # In[51]: #", "function is the negative log-likelihood function. loss = F.nll_loss(output, target) #This calculates the", "nn.Dropout2d() self.conv5 = nn.Conv2d(64, 64, kernel_size=4,stride=1, padding=2 ) self.conv5_bn = nn.BatchNorm2d(64) self.conv6 =", "x = F.max_pool2d(x, kernel_size=2,stride=2) # x = self.conv4_drop(x) x = F.relu(self.conv5_bn(self.conv5(x))) x =", "accuracy = ( float( prediction.eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) accuracy_test = np.mean(test_accu) print(accuracy_test) #", "= ( float( prediction[i].eq(target.data).sum() ) /float(batch_size))*100.0 test_accu.append(accuracy) test_accu = np.asarray(test_accu).reshape((10000/50,50)) accuracy_test = np.mean(test_accu," ]
[ "if 'payer_bank_branch_name' in d: o.payer_bank_branch_name = d['payer_bank_branch_name'] if 'payer_inst_id' in d: o.payer_inst_id =", "business_scene(self): return self._business_scene @business_scene.setter def business_scene(self, value): self._business_scene = value @property def channel(self):", "if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] = self.writeoff_relative_id return params @staticmethod", "self.tnt_inst_id if self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] = self.used_amt.to_alipay_dict() else: params['used_amt'] = self.used_amt", "self.used_amt if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] = self.writeoff_relative_id", "= self.source if self.status: if hasattr(self.status, 'to_alipay_dict'): params['status'] = self.status.to_alipay_dict() else: params['status'] =", "self.bsn_ref_no if self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] = self.business_scene.to_alipay_dict() else: params['business_scene'] = self.business_scene", "self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict()", "@property def used_amt(self): return self._used_amt @used_amt.setter def used_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt", "self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] = self.payee_account_no if self.payee_inst_id:", "= self.writeoff_relative_id return params @staticmethod def from_alipay_dict(d): 
if not d: return None o", "= None self._channel_memo = None self._collect_amt = None self._collect_date = None self._collect_status =", "d['business_scene'] if 'channel' in d: o.channel = d['channel'] if 'channel_log_no' in d: o.channel_log_no", "value): self._collect_date = value @property def collect_status(self): return self._collect_status @collect_status.setter def collect_status(self, value):", "self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified", "= d['payer_account_no'] if 'payer_bank_branch_name' in d: o.payer_bank_branch_name = d['payer_bank_branch_name'] if 'payer_inst_id' in d:", "if hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] = self.gl_exchange_rate if self.gmt_create: if", "= d['collected_amt'] if 'creator' in d: o.creator = d['creator'] if 'freeze_amt' in d:", "self._collect_amt = None self._collect_date = None self._collect_status = None self._collected_amt = None self._creator", "if self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] = self.fund_log_id if", "self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self, value): self._payer_inst_id = value @property def payer_ip_role_id(self): return self._payer_ip_role_id", "value): self._payer_inst_id = value @property def payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self, value):", "'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] = self.fund_log_id if self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 'to_alipay_dict'):", "None o = CollectReceiptOpenApiDTO() if 'bsn_no' in d: o.bsn_no = d['bsn_no'] if 'bsn_ref_no'", "self.receipt_no 
if self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no'] = self.ref_trans_no", "= d['payer_account_name'] if 'payer_account_no' in d: o.payer_account_no = d['payer_account_no'] if 'payer_bank_branch_name' in d:", "self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self, value): self._payer_bank_branch_name = value @property def payer_inst_id(self): return self._payer_inst_id", "self.collect_amt if self.collect_date: if hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict() else: params['collect_date'] = self.collect_date", "def payer_account_no(self, value): self._payer_account_no = value @property def payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter def", "self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no'] = self.receipt_no.to_alipay_dict() else: params['receipt_no'] = self.receipt_no if self.ref_trans_no:", "value @property def collect_status(self): return self._collect_status @collect_status.setter def collect_status(self, value): self._collect_status = value", "collected_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt = value else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property", "d['freeze_amt'] if 'fund_log_id' in d: o.fund_log_id = d['fund_log_id'] if 'gl_exchange_rate' in d: o.gl_exchange_rate", "self._channel_memo = None self._collect_amt = None self._collect_date = None self._collect_status = None self._collected_amt", "self.payer_bank_branch_name if self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] = self.payer_inst_id", "o.freeze_amt = d['freeze_amt'] if 'fund_log_id' in d: o.fund_log_id = 
d['fund_log_id'] if 'gl_exchange_rate' in", "params['collect_date'] = self.collect_date if self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] = self.collect_status.to_alipay_dict() else: params['collect_status']", "else: params['payee_account_name'] = self.payee_account_name if self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else:", "CollectReceiptOpenApiDTO() if 'bsn_no' in d: o.bsn_no = d['bsn_no'] if 'bsn_ref_no' in d: o.bsn_ref_no", "collect_date(self, value): self._collect_date = value @property def collect_status(self): return self._collect_status @collect_status.setter def collect_status(self,", "def used_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt = value else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value)", "def payer_account_name(self): return self._payer_account_name @payer_account_name.setter def payer_account_name(self, value): self._payer_account_name = value @property def", "else: params['bsn_ref_no'] = self.bsn_ref_no if self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] = self.business_scene.to_alipay_dict() else:", "if 'channel_memo' in d: o.channel_memo = d['channel_memo'] if 'collect_amt' in d: o.collect_amt =", "isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt = value else: self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self): return", "self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] = self.payer_ip_role_id if self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no'] = self.receipt_no.to_alipay_dict()", "def creator(self): return self._creator @creator.setter def creator(self, value): self._creator = value @property def", "= None @property def bsn_no(self): return self._bsn_no @bsn_no.setter 
def bsn_no(self, value): self._bsn_no =", "d['payee_inst_id'] if 'payee_ip_role_id' in d: o.payee_ip_role_id = d['payee_ip_role_id'] if 'payer_account_name' in d: o.payer_account_name", "= d['channel_log_no'] if 'channel_memo' in d: o.channel_memo = d['channel_memo'] if 'collect_amt' in d:", "hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] = self.payee_account_name if self.payee_account_no: if hasattr(self.payee_account_no,", "self.source if self.status: if hasattr(self.status, 'to_alipay_dict'): params['status'] = self.status.to_alipay_dict() else: params['status'] = self.status", "params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] = self.tnt_inst_id if self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt']", "if 'bsn_ref_no' in d: o.bsn_ref_no = d['bsn_ref_no'] if 'business_scene' in d: o.business_scene =", "self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] = self.payee_account_name if self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict()", "= d['gmt_create'] if 'gmt_modified' in d: o.gmt_modified = d['gmt_modified'] if 'payee_account_name' in d:", "self._payer_account_no = None self._payer_bank_branch_name = None self._payer_inst_id = None self._payer_ip_role_id = None self._receipt_no", "None self._ref_trans_no_type = None self._source = None self._status = None self._tnt_inst_id = None", "def payer_account_name(self, value): self._payer_account_name = value @property def payer_account_no(self): return self._payer_account_no @payer_account_no.setter def", "'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict() else: params['bsn_no'] = self.bsn_no if self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'):", "self.business_scene.to_alipay_dict() else: 
params['business_scene'] = self.business_scene if self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict()", "if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict() else: params['collected_amt'] = self.collected_amt if self.creator: if", "collect_date(self): return self._collect_date @collect_date.setter def collect_date(self, value): self._collect_date = value @property def collect_status(self):", "self._gmt_create = value @property def gmt_modified(self): return self._gmt_modified @gmt_modified.setter def gmt_modified(self, value): self._gmt_modified", "if hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no'] = self.receipt_no.to_alipay_dict() else: params['receipt_no'] = self.receipt_no if self.ref_trans_no: if", "self.status.to_alipay_dict() else: params['status'] = self.status if self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict()", "= self.payer_inst_id if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] =", "= None self._status = None self._tnt_inst_id = None self._used_amt = None self._writeoff_relative_id =", "self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] = self.business_scene.to_alipay_dict() else: params['business_scene'] = self.business_scene if self.channel:", "o.gmt_modified = d['gmt_modified'] if 'payee_account_name' in d: o.payee_account_name = d['payee_account_name'] if 'payee_account_no' in", "= None self._fund_log_id = None self._gl_exchange_rate = None self._gmt_create = None self._gmt_modified =", "self._business_scene = value @property def channel(self): return self._channel @channel.setter def channel(self, value): self._channel", "self._collect_status @collect_status.setter def 
collect_status(self, value): self._collect_status = value @property def collected_amt(self): return self._collected_amt", "params['payee_account_name'] = self.payee_account_name if self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else: params['payee_account_no']", "else: params['gl_exchange_rate'] = self.gl_exchange_rate if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else:", "d: o.fund_log_id = d['fund_log_id'] if 'gl_exchange_rate' in d: o.gl_exchange_rate = d['gl_exchange_rate'] if 'gmt_create'", "self.payer_ip_role_id if self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no'] = self.receipt_no.to_alipay_dict() else: params['receipt_no'] = self.receipt_no", "= self.collect_date.to_alipay_dict() else: params['collect_date'] = self.collect_date if self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] =", "o.payer_bank_branch_name = d['payer_bank_branch_name'] if 'payer_inst_id' in d: o.payer_inst_id = d['payer_inst_id'] if 'payer_ip_role_id' in", "self._payee_account_no = None self._payee_inst_id = None self._payee_ip_role_id = None self._payer_account_name = None self._payer_account_no", "params['receipt_no'] = self.receipt_no if self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no']", "value @property def freeze_amt(self): return self._freeze_amt @freeze_amt.setter def freeze_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi):", "o.collect_date = d['collect_date'] if 'collect_status' in d: o.collect_status = d['collect_status'] if 'collected_amt' in", "else: params['collect_date'] = self.collect_date if self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] = 
self.collect_status.to_alipay_dict() else:", "value @property def payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self, value): self._payee_inst_id = value", "self._payer_account_no = value @property def payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self, value): self._payer_bank_branch_name", "self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create if self.gmt_modified:", "if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] = self.payer_ip_role_id if", "= value @property def payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self, value): self._payer_bank_branch_name =", "o = CollectReceiptOpenApiDTO() if 'bsn_no' in d: o.bsn_no = d['bsn_no'] if 'bsn_ref_no' in", "else: params['payer_ip_role_id'] = self.payer_ip_role_id if self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no'] = self.receipt_no.to_alipay_dict() else:", "hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] = self.used_amt.to_alipay_dict() else: params['used_amt'] = self.used_amt if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id,", "if 'gmt_modified' in d: o.gmt_modified = d['gmt_modified'] if 'payee_account_name' in d: o.payee_account_name =", "None self._collect_status = None self._collected_amt = None self._creator = None self._freeze_amt = None", "d: o.gl_exchange_rate = d['gl_exchange_rate'] if 'gmt_create' in d: o.gmt_create = d['gmt_create'] if 'gmt_modified'", "self._collected_amt @collected_amt.setter def collected_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt = value else: 
self._collected_amt", "self._writeoff_relative_id = None @property def bsn_no(self): return self._bsn_no @bsn_no.setter def bsn_no(self, value): self._bsn_no", "self._payee_account_name = value @property def payee_account_no(self): return self._payee_account_no @payee_account_no.setter def payee_account_no(self, value): self._payee_account_no", "self.collect_date.to_alipay_dict() else: params['collect_date'] = self.collect_date if self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] = self.collect_status.to_alipay_dict()", "def collect_date(self): return self._collect_date @collect_date.setter def collect_date(self, value): self._collect_date = value @property def", "None self._channel_memo = None self._collect_amt = None self._collect_date = None self._collect_status = None", "self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self, value): self._writeoff_relative_id", "value @property def channel_memo(self): return self._channel_memo @channel_memo.setter def channel_memo(self, value): self._channel_memo = value", "self._creator @creator.setter def creator(self, value): self._creator = value @property def freeze_amt(self): return self._freeze_amt", "d: o.gmt_modified = d['gmt_modified'] if 'payee_account_name' in d: o.payee_account_name = d['payee_account_name'] if 'payee_account_no'", "self._payer_ip_role_id = value @property def receipt_no(self): return self._receipt_no @receipt_no.setter def receipt_no(self, value): self._receipt_no", "if 'source' in d: o.source = d['source'] if 'status' in d: o.status =", "freeze_amt(self): return self._freeze_amt @freeze_amt.setter def freeze_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt = value", "channel(self): return self._channel @channel.setter def channel(self, value): self._channel = value 
@property def channel_log_no(self):", "None self._status = None self._tnt_inst_id = None self._used_amt = None self._writeoff_relative_id = None", "payee_account_name(self): return self._payee_account_name @payee_account_name.setter def payee_account_name(self, value): self._payee_account_name = value @property def payee_account_no(self):", "value): self._payer_ip_role_id = value @property def receipt_no(self): return self._receipt_no @receipt_no.setter def receipt_no(self, value):", "self.business_scene if self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else: params['channel'] = self.channel", "value @property def payer_account_name(self): return self._payer_account_name @payer_account_name.setter def payer_account_name(self, value): self._payer_account_name = value", "value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt = value else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def", "self.collected_amt if self.creator: if hasattr(self.creator, 'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict() else: params['creator'] = self.creator", "d['channel_memo'] if 'collect_amt' in d: o.collect_amt = d['collect_amt'] if 'collect_date' in d: o.collect_date", "params['channel_log_no'] = self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] = self.channel_log_no if self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo']", "None self._collect_date = None self._collect_status = None self._collected_amt = None self._creator = None", "= d['gl_exchange_rate'] if 'gmt_create' in d: o.gmt_create = d['gmt_create'] if 'gmt_modified' in d:", "self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no'] = self.ref_trans_no if self.ref_trans_no_type:", "= value @property def channel_memo(self): return 
self._channel_memo @channel_memo.setter def channel_memo(self, value): self._channel_memo =", "def writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self, value): self._writeoff_relative_id = value def to_alipay_dict(self):", "d['payee_ip_role_id'] if 'payer_account_name' in d: o.payer_account_name = d['payer_account_name'] if 'payer_account_no' in d: o.payer_account_no", "self._used_amt = None self._writeoff_relative_id = None @property def bsn_no(self): return self._bsn_no @bsn_no.setter def", "self._payee_account_name = None self._payee_account_no = None self._payee_inst_id = None self._payee_ip_role_id = None self._payer_account_name", "@ref_trans_no.setter def ref_trans_no(self, value): self._ref_trans_no = value @property def ref_trans_no_type(self): return self._ref_trans_no_type @ref_trans_no_type.setter", "d['bsn_ref_no'] if 'business_scene' in d: o.business_scene = d['business_scene'] if 'channel' in d: o.channel", "o.bsn_ref_no = d['bsn_ref_no'] if 'business_scene' in d: o.business_scene = d['business_scene'] if 'channel' in", "self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] = self.fund_log_id if self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict()", "if 'payee_account_no' in d: o.payee_account_no = d['payee_account_no'] if 'payee_inst_id' in d: o.payee_inst_id =", "if self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] = self.business_scene.to_alipay_dict() else: params['business_scene'] = self.business_scene if", "self.payee_ip_role_id if self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] = self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] = self.payer_account_name", "in d: o.freeze_amt = d['freeze_amt'] if 'fund_log_id' in d: o.fund_log_id = 
d['fund_log_id'] if", "= d['collect_status'] if 'collected_amt' in d: o.collected_amt = d['collected_amt'] if 'creator' in d:", "self._payee_account_name @payee_account_name.setter def payee_account_name(self, value): self._payee_account_name = value @property def payee_account_no(self): return self._payee_account_no", "value): self._tnt_inst_id = value @property def used_amt(self): return self._used_amt @used_amt.setter def used_amt(self, value):", "None self._gl_exchange_rate = None self._gmt_create = None self._gmt_modified = None self._payee_account_name = None", "else: params['channel_memo'] = self.channel_memo if self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] = self.collect_amt.to_alipay_dict() else:", "'gmt_create' in d: o.gmt_create = d['gmt_create'] if 'gmt_modified' in d: o.gmt_modified = d['gmt_modified']", "'payer_inst_id' in d: o.payer_inst_id = d['payer_inst_id'] if 'payer_ip_role_id' in d: o.payer_ip_role_id = d['payer_ip_role_id']", "= None self._business_scene = None self._channel = None self._channel_log_no = None self._channel_memo =", "= None self._tnt_inst_id = None self._used_amt = None self._writeoff_relative_id = None @property def", "'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id'] = self.payee_ip_role_id if self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'):", "d: o.payee_account_name = d['payee_account_name'] if 'payee_account_no' in d: o.payee_account_no = d['payee_account_no'] if 'payee_inst_id'", "= self.used_amt.to_alipay_dict() else: params['used_amt'] = self.used_amt if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] =", "value @property def payer_account_no(self): return self._payer_account_no @payer_account_no.setter def payer_account_no(self, value): self._payer_account_no = value", "def bsn_ref_no(self, value): 
self._bsn_ref_no = value @property def business_scene(self): return self._business_scene @business_scene.setter def", "payer_account_no(self, value): self._payer_account_no = value @property def payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self,", "else: params['payer_inst_id'] = self.payer_inst_id if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else:", "value @property def gmt_create(self): return self._gmt_create @gmt_create.setter def gmt_create(self, value): self._gmt_create = value", "params['fund_log_id'] = self.fund_log_id if self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate']", "params @staticmethod def from_alipay_dict(d): if not d: return None o = CollectReceiptOpenApiDTO() if", "self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self): return self._collect_date @collect_date.setter def collect_date(self, value): self._collect_date", "self._freeze_amt = None self._fund_log_id = None self._gl_exchange_rate = None self._gmt_create = None self._gmt_modified", "value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt = value else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def", "if 'collect_date' in d: o.collect_date = d['collect_date'] if 'collect_status' in d: o.collect_status =", "self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] = self.payee_inst_id if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict()", "self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict() 
else: params['payer_account_no'] = self.payer_account_no if self.payer_bank_branch_name:", "'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] = self.gl_exchange_rate if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'):", "= value @property def gmt_create(self): return self._gmt_create @gmt_create.setter def gmt_create(self, value): self._gmt_create =", "value): self._payer_bank_branch_name = value @property def payer_inst_id(self): return self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self, value):", "self._payer_bank_branch_name = None self._payer_inst_id = None self._payer_ip_role_id = None self._receipt_no = None self._ref_trans_no", "hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] = self.business_scene.to_alipay_dict() else: params['business_scene'] = self.business_scene if self.channel: if hasattr(self.channel,", "'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] = self.payee_inst_id if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'):", "def to_alipay_dict(self): params = dict() if self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict()", "self._used_amt @used_amt.setter def used_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt = value else: self._used_amt", "= self.payer_bank_branch_name if self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] =", "= None self._ref_trans_no = None self._ref_trans_no_type = None self._source = None self._status =", "self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] = self.payer_bank_branch_name if self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] = 
self.payer_inst_id.to_alipay_dict()", "params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] = self.ref_trans_no_type if self.source: if hasattr(self.source, 'to_alipay_dict'): params['source']", "if self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] = self.payer_inst_id if", "self._payer_account_name = value @property def payer_account_no(self): return self._payer_account_no @payer_account_no.setter def payer_account_no(self, value): self._payer_account_no", "None self._freeze_amt = None self._fund_log_id = None self._gl_exchange_rate = None self._gmt_create = None", "<gh_stars>100-1000 #!/usr/bin/env python # -*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import", "value @property def gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self, value): self._gl_exchange_rate = value", "return self._receipt_no @receipt_no.setter def receipt_no(self, value): self._receipt_no = value @property def ref_trans_no(self): return", "self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] = self.fund_log_id if self.gl_exchange_rate:", "if hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() else: params['source'] = self.source if self.status: if", "self.source: if hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() else: params['source'] = self.source if self.status:", "def gmt_modified(self, value): self._gmt_modified = value @property def payee_account_name(self): return self._payee_account_name @payee_account_name.setter def", "if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() 
else: params['payee_ip_role_id'] = self.payee_ip_role_id if", "o.collect_status = d['collect_status'] if 'collected_amt' in d: o.collected_amt = d['collected_amt'] if 'creator' in", "@collect_date.setter def collect_date(self, value): self._collect_date = value @property def collect_status(self): return self._collect_status @collect_status.setter", "bsn_no(self): return self._bsn_no @bsn_no.setter def bsn_no(self, value): self._bsn_no = value @property def bsn_ref_no(self):", "self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def creator(self): return self._creator @creator.setter def creator(self, value): self._creator", "payer_ip_role_id(self, value): self._payer_ip_role_id = value @property def receipt_no(self): return self._receipt_no @receipt_no.setter def receipt_no(self,", "self._channel_log_no = value @property def channel_memo(self): return self._channel_memo @channel_memo.setter def channel_memo(self, value): self._channel_memo", "def receipt_no(self, value): self._receipt_no = value @property def ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter def", "#!/usr/bin/env python # -*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import *", "= None self._collect_date = None self._collect_status = None self._collected_amt = None self._creator =", "ref_trans_no(self, value): self._ref_trans_no = value @property def ref_trans_no_type(self): return self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self,", "value else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self,", "params['collect_status'] = self.collect_status.to_alipay_dict() else: params['collect_status'] = self.collect_status if self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt']", "= self.fund_log_id if 
self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] =", "params['payer_account_no'] = self.payer_account_no if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name']", "= self.payer_account_name if self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict() else: params['payer_account_no'] =", "self.collect_amt.to_alipay_dict() else: params['collect_amt'] = self.collect_amt if self.collect_date: if hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict()", "if self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] = self.tnt_inst_id if", "return self._gmt_modified @gmt_modified.setter def gmt_modified(self, value): self._gmt_modified = value @property def payee_account_name(self): return", "def payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self, value): self._payee_inst_id = value @property def", "def business_scene(self, value): self._business_scene = value @property def channel(self): return self._channel @channel.setter def", "params['payer_ip_role_id'] = self.payer_ip_role_id if self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no'] = self.receipt_no.to_alipay_dict() else: params['receipt_no']", "return None o = CollectReceiptOpenApiDTO() if 'bsn_no' in d: o.bsn_no = d['bsn_no'] if", "if self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] = self.bsn_ref_no if", "def collected_amt(self, value): if 
isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt = value else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value)", "d: o.channel = d['channel'] if 'channel_log_no' in d: o.channel_log_no = d['channel_log_no'] if 'channel_memo'", "import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import", "d: o.payer_inst_id = d['payer_inst_id'] if 'payer_ip_role_id' in d: o.payer_ip_role_id = d['payer_ip_role_id'] if 'receipt_no'", "else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def creator(self): return self._creator @creator.setter def creator(self, value):", "def payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter def payee_ip_role_id(self, value): self._payee_ip_role_id = value @property def", "None self._bsn_ref_no = None self._business_scene = None self._channel = None self._channel_log_no = None", "source(self): return self._source @source.setter def source(self, value): self._source = value @property def status(self):", "if self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] = self.payee_inst_id if", "self._bsn_no @bsn_no.setter def bsn_no(self, value): self._bsn_no = value @property def bsn_ref_no(self): return self._bsn_ref_no", "'to_alipay_dict'): params['channel_log_no'] = self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] = self.channel_log_no if self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'):", "self._gmt_create @gmt_create.setter def gmt_create(self, value): self._gmt_create = value @property def gmt_modified(self): return self._gmt_modified", "if 'channel' in d: o.channel = d['channel'] if 'channel_log_no' in d: o.channel_log_no =", 
"d['collect_status'] if 'collected_amt' in d: o.collected_amt = d['collected_amt'] if 'creator' in d: o.creator", "@gmt_modified.setter def gmt_modified(self, value): self._gmt_modified = value @property def payee_account_name(self): return self._payee_account_name @payee_account_name.setter", "self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else: params['channel'] = self.channel if self.channel_log_no:", "self.ref_trans_no if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] = self.ref_trans_no_type", "from_alipay_dict(d): if not d: return None o = CollectReceiptOpenApiDTO() if 'bsn_no' in d:", "self._creator = None self._freeze_amt = None self._fund_log_id = None self._gl_exchange_rate = None self._gmt_create", "@property def payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self, value): self._payee_inst_id = value @property", "def tnt_inst_id(self, value): self._tnt_inst_id = value @property def used_amt(self): return self._used_amt @used_amt.setter def", "None self._business_scene = None self._channel = None self._channel_log_no = None self._channel_memo = None", "= self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] = self.payer_ip_role_id if self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no'] =", "= self.payer_account_no if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] =", "in d: o.channel = d['channel'] if 'channel_log_no' in d: o.channel_log_no = d['channel_log_no'] if", "params['collect_amt'] = self.collect_amt.to_alipay_dict() else: params['collect_amt'] = self.collect_amt if self.collect_date: if 
hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date']", "def payer_inst_id(self): return self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self, value): self._payer_inst_id = value @property def", "o.payer_account_no = d['payer_account_no'] if 'payer_bank_branch_name' in d: o.payer_bank_branch_name = d['payer_bank_branch_name'] if 'payer_inst_id' in", "= None self._ref_trans_no_type = None self._source = None self._status = None self._tnt_inst_id =", "alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no = None self._bsn_ref_no = None", "hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] = self.collect_amt.to_alipay_dict() else: params['collect_amt'] = self.collect_amt if self.collect_date: if hasattr(self.collect_date,", "if 'business_scene' in d: o.business_scene = d['business_scene'] if 'channel' in d: o.channel =", "in d: o.payee_account_no = d['payee_account_no'] if 'payee_inst_id' in d: o.payee_inst_id = d['payee_inst_id'] if", "self._gl_exchange_rate = None self._gmt_create = None self._gmt_modified = None self._payee_account_name = None self._payee_account_no", "@collect_amt.setter def collect_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt = value else: self._collect_amt =", "def payee_account_no(self, value): self._payee_account_no = value @property def payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter def", "in d: o.payee_ip_role_id = d['payee_ip_role_id'] if 'payer_account_name' in d: o.payer_account_name = d['payer_account_name'] if", "= d['payee_ip_role_id'] if 'payer_account_name' in d: o.payer_account_name = d['payer_account_name'] if 'payer_account_no' in d:", "if 'ref_trans_no' in d: o.ref_trans_no = d['ref_trans_no'] if 'ref_trans_no_type' in d: o.ref_trans_no_type =", "'to_alipay_dict'): params['payer_account_name'] = 
self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] = self.payer_account_name if self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'):", "d: o.collect_status = d['collect_status'] if 'collected_amt' in d: o.collected_amt = d['collected_amt'] if 'creator'", "= self.payer_ip_role_id if self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no'] = self.receipt_no.to_alipay_dict() else: params['receipt_no'] =", "= self.collect_date if self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] = self.collect_status.to_alipay_dict() else: params['collect_status'] =", "self._tnt_inst_id = None self._used_amt = None self._writeoff_relative_id = None @property def bsn_no(self): return", "def payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self, value): self._payer_ip_role_id = value @property def", "self.channel if self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] = self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] = self.channel_log_no", "else: params['channel'] = self.channel if self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] = self.channel_log_no.to_alipay_dict() else:", "= d['bsn_no'] if 'bsn_ref_no' in d: o.bsn_ref_no = d['bsn_ref_no'] if 'business_scene' in d:", "return self._status @status.setter def status(self, value): self._status = value @property def tnt_inst_id(self): return", "value): self._fund_log_id = value @property def gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self, value):", "if isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt = value else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def creator(self):", "def payee_inst_id(self, value): self._payee_inst_id = value @property 
def payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter def", "self._payee_inst_id = value @property def payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter def payee_ip_role_id(self, value): self._payee_ip_role_id", "payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self, value): self._payer_ip_role_id = value @property def receipt_no(self):", "value def to_alipay_dict(self): params = dict() if self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] =", "if self.source: if hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() else: params['source'] = self.source if", "= d['payee_account_name'] if 'payee_account_no' in d: o.payee_account_no = d['payee_account_no'] if 'payee_inst_id' in d:", "None self._channel = None self._channel_log_no = None self._channel_memo = None self._collect_amt = None", "d['payer_account_no'] if 'payer_bank_branch_name' in d: o.payer_bank_branch_name = d['payer_bank_branch_name'] if 'payer_inst_id' in d: o.payer_inst_id", "o.payee_account_name = d['payee_account_name'] if 'payee_account_no' in d: o.payee_account_no = d['payee_account_no'] if 'payee_inst_id' in", "= self.bsn_no if self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] =", "d: o.channel_memo = d['channel_memo'] if 'collect_amt' in d: o.collect_amt = d['collect_amt'] if 'collect_date'", "= None self._collect_amt = None self._collect_date = None self._collect_status = None self._collected_amt =", "def gmt_create(self): return self._gmt_create @gmt_create.setter def gmt_create(self, value): self._gmt_create = value @property def", "= self.payer_account_no.to_alipay_dict() else: params['payer_account_no'] = self.payer_account_no if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): 
params['payer_bank_branch_name'] =", "if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] = self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] = self.payer_account_name if self.payer_account_no: if", "= value @property def payer_account_no(self): return self._payer_account_no @payer_account_no.setter def payer_account_no(self, value): self._payer_account_no =", "value else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def creator(self): return self._creator @creator.setter def creator(self,", "MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def __init__(self):", "def business_scene(self): return self._business_scene @business_scene.setter def business_scene(self, value): self._business_scene = value @property def", "= value @property def channel(self): return self._channel @channel.setter def channel(self, value): self._channel =", "self._channel = value @property def channel_log_no(self): return self._channel_log_no @channel_log_no.setter def channel_log_no(self, value): self._channel_log_no", "@property def receipt_no(self): return self._receipt_no @receipt_no.setter def receipt_no(self, value): self._receipt_no = value @property", "= self.collected_amt.to_alipay_dict() else: params['collected_amt'] = self.collected_amt if self.creator: if hasattr(self.creator, 'to_alipay_dict'): params['creator'] =", "params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] = self.freeze_amt if self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id']", "d['bsn_no'] if 'bsn_ref_no' in d: o.bsn_ref_no = d['bsn_ref_no'] if 'business_scene' in d: o.business_scene", "if self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'): 
params['receipt_no'] = self.receipt_no.to_alipay_dict() else: params['receipt_no'] = self.receipt_no if", "isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt = value else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self): return", "self._gl_exchange_rate = value @property def gmt_create(self): return self._gmt_create @gmt_create.setter def gmt_create(self, value): self._gmt_create", "self._status @status.setter def status(self, value): self._status = value @property def tnt_inst_id(self): return self._tnt_inst_id", "= self.channel if self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] = self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] =", "'gmt_modified' in d: o.gmt_modified = d['gmt_modified'] if 'payee_account_name' in d: o.payee_account_name = d['payee_account_name']", "if 'tnt_inst_id' in d: o.tnt_inst_id = d['tnt_inst_id'] if 'used_amt' in d: o.used_amt =", "self._receipt_no = None self._ref_trans_no = None self._ref_trans_no_type = None self._source = None self._status", "in d: o.channel_memo = d['channel_memo'] if 'collect_amt' in d: o.collect_amt = d['collect_amt'] if", "return self._bsn_ref_no @bsn_ref_no.setter def bsn_ref_no(self, value): self._bsn_ref_no = value @property def business_scene(self): return", "= value @property def payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self, value): self._payer_ip_role_id =", "return self._business_scene @business_scene.setter def business_scene(self, value): self._business_scene = value @property def channel(self): return", "self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self, value): self._gl_exchange_rate = value @property def gmt_create(self): return self._gmt_create", "def ref_trans_no(self, value): self._ref_trans_no = value @property def ref_trans_no_type(self): return self._ref_trans_no_type 
@ref_trans_no_type.setter def", "= value else: self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self): return self._fund_log_id @fund_log_id.setter def", "def payer_bank_branch_name(self, value): self._payer_bank_branch_name = value @property def payer_inst_id(self): return self._payer_inst_id @payer_inst_id.setter def", "= d['payer_ip_role_id'] if 'receipt_no' in d: o.receipt_no = d['receipt_no'] if 'ref_trans_no' in d:", "= self.payee_inst_id if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id'] =", "channel_memo(self, value): self._channel_memo = value @property def collect_amt(self): return self._collect_amt @collect_amt.setter def collect_amt(self,", "hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict() else: params['bsn_no'] = self.bsn_no if self.bsn_ref_no: if hasattr(self.bsn_ref_no,", "= self.creator.to_alipay_dict() else: params['creator'] = self.creator if self.freeze_amt: if hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] =", "= MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self): return self._collect_date @collect_date.setter def collect_date(self, value): self._collect_date =", "= self.business_scene if self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else: params['channel'] =", "hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] = self.payee_account_no if self.payee_inst_id: if hasattr(self.payee_inst_id,", "= None self._payer_ip_role_id = None self._receipt_no = None self._ref_trans_no = None self._ref_trans_no_type =", "def status(self): return self._status @status.setter def status(self, value): self._status = value @property def", 
"params['gl_exchange_rate'] = self.gl_exchange_rate if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create']", "params['payer_account_no'] = self.payer_account_no.to_alipay_dict() else: params['payer_account_no'] = self.payer_account_no if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): params['payer_bank_branch_name']", "payer_inst_id(self, value): self._payer_inst_id = value @property def payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self,", "import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def", "return self._used_amt @used_amt.setter def used_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt = value else:", "self._payer_bank_branch_name = value @property def payer_inst_id(self): return self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self, value): self._payer_inst_id", "@property def writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self, value): self._writeoff_relative_id = value def", "hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] = self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] = self.channel_log_no if self.channel_memo: if hasattr(self.channel_memo,", "None self._payer_bank_branch_name = None self._payer_inst_id = None self._payer_ip_role_id = None self._receipt_no = None", "self._ref_trans_no_type = value @property def source(self): return self._source @source.setter def source(self, value): self._source", "from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import 
MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from", "self._tnt_inst_id = value @property def used_amt(self): return self._used_amt @used_amt.setter def used_amt(self, value): if", "self._fund_log_id = None self._gl_exchange_rate = None self._gmt_create = None self._gmt_modified = None self._payee_account_name", "params['used_amt'] = self.used_amt if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id']", "if 'collect_amt' in d: o.collect_amt = d['collect_amt'] if 'collect_date' in d: o.collect_date =", "MultiCurrencyMoneyOpenApi): self._freeze_amt = value else: self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self): return self._fund_log_id", "params['bsn_no'] = self.bsn_no.to_alipay_dict() else: params['bsn_no'] = self.bsn_no if self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no']", "= value @property def payee_account_name(self): return self._payee_account_name @payee_account_name.setter def payee_account_name(self, value): self._payee_account_name =", "if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict() else: params['bsn_no'] = self.bsn_no if self.bsn_ref_no: if", "self.freeze_amt if self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] = self.fund_log_id", "value): self._receipt_no = value @property def ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter def ref_trans_no(self, value):", "o.creator = d['creator'] if 'freeze_amt' in d: o.freeze_amt = d['freeze_amt'] if 'fund_log_id' in", "return self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self, value): self._payer_bank_branch_name = value 
@property def payer_inst_id(self): return", "'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] = self.tnt_inst_id if self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'):", "@property def gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self, value): self._gl_exchange_rate = value @property", "params['business_scene'] = self.business_scene if self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else: params['channel']", "'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'):", "channel_log_no(self): return self._channel_log_no @channel_log_no.setter def channel_log_no(self, value): self._channel_log_no = value @property def channel_memo(self):", "def collect_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt = value else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value)", "self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self, value): self._payee_inst_id = value @property def payee_ip_role_id(self): return self._payee_ip_role_id", "if self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] = self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] = self.channel_log_no if", "self.channel.to_alipay_dict() else: params['channel'] = self.channel if self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] = self.channel_log_no.to_alipay_dict()", "= self.payee_ip_role_id if self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] = self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] =", "payee_account_no(self): return self._payee_account_no 
@payee_account_no.setter def payee_account_no(self, value): self._payee_account_no = value @property def payee_inst_id(self):", "else: params['ref_trans_no_type'] = self.ref_trans_no_type if self.source: if hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() else:", "= self.used_amt if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] =", "= self.collect_status.to_alipay_dict() else: params['collect_status'] = self.collect_status if self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] =", "@property def freeze_amt(self): return self._freeze_amt @freeze_amt.setter def freeze_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt", "self._business_scene @business_scene.setter def business_scene(self, value): self._business_scene = value @property def channel(self): return self._channel", "def ref_trans_no_type(self): return self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self, value): self._ref_trans_no_type = value @property def", "if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] = self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] = self.channel_log_no if self.channel_memo: if", "self.bsn_no.to_alipay_dict() else: params['bsn_no'] = self.bsn_no if self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict()", "d: o.source = d['source'] if 'status' in d: o.status = d['status'] if 'tnt_inst_id'", "@property def payee_account_name(self): return self._payee_account_name @payee_account_name.setter def payee_account_name(self, value): self._payee_account_name = value @property", "self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = 
self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified if self.payee_account_name:", "self._ref_trans_no = None self._ref_trans_no_type = None self._source = None self._status = None self._tnt_inst_id", "None self._used_amt = None self._writeoff_relative_id = None @property def bsn_no(self): return self._bsn_no @bsn_no.setter", "self.gmt_modified if self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] = self.payee_account_name", "params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] = self.payer_inst_id if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id']", "self._collected_amt = value else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def creator(self): return self._creator @creator.setter", "self._collect_date = None self._collect_status = None self._collected_amt = None self._creator = None self._freeze_amt", "if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no'] = self.ref_trans_no if self.ref_trans_no_type: if", "d: o.bsn_no = d['bsn_no'] if 'bsn_ref_no' in d: o.bsn_ref_no = d['bsn_ref_no'] if 'business_scene'", "in d: o.ref_trans_no_type = d['ref_trans_no_type'] if 'source' in d: o.source = d['source'] if", "collect_amt(self): return self._collect_amt @collect_amt.setter def collect_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt = value", "if hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] = self.freeze_amt if self.fund_log_id: if", "'fund_log_id' in d: o.fund_log_id = d['fund_log_id'] if 'gl_exchange_rate' in d: o.gl_exchange_rate = d['gl_exchange_rate']", "value): 
self._payer_account_name = value @property def payer_account_no(self): return self._payer_account_no @payer_account_no.setter def payer_account_no(self, value):", "import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no = None", "status(self, value): self._status = value @property def tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self,", "= MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def creator(self): return self._creator @creator.setter def creator(self, value): self._creator =", "def gmt_modified(self): return self._gmt_modified @gmt_modified.setter def gmt_modified(self, value): self._gmt_modified = value @property def", "= self.collect_status if self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict() else: params['collected_amt'] =", "= None self._gl_exchange_rate = None self._gmt_create = None self._gmt_modified = None self._payee_account_name =", "= value @property def ref_trans_no_type(self): return self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self, value): self._ref_trans_no_type =", "value @property def channel_log_no(self): return self._channel_log_no @channel_log_no.setter def channel_log_no(self, value): self._channel_log_no = value", "@status.setter def status(self, value): self._status = value @property def tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter", "@property def payer_account_no(self): return self._payer_account_no @payer_account_no.setter def payer_account_no(self, value): self._payer_account_no = value @property", "tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self, value): self._tnt_inst_id = value @property def used_amt(self):", "params['channel_memo'] = self.channel_memo.to_alipay_dict() else: 
params['channel_memo'] = self.channel_memo if self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt']", "hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no'] = self.receipt_no.to_alipay_dict() else: params['receipt_no'] = self.receipt_no if self.ref_trans_no: if hasattr(self.ref_trans_no,", "if self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] = self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] = self.payer_account_name if", "= d['collect_date'] if 'collect_status' in d: o.collect_status = d['collect_status'] if 'collected_amt' in d:", "= value @property def bsn_ref_no(self): return self._bsn_ref_no @bsn_ref_no.setter def bsn_ref_no(self, value): self._bsn_ref_no =", "self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] = self.collect_status.to_alipay_dict() else: params['collect_status'] = self.collect_status if self.collected_amt:", "if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create if", "self._creator = value @property def freeze_amt(self): return self._freeze_amt @freeze_amt.setter def freeze_amt(self, value): if", "used_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt = value else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property", "None self._receipt_no = None self._ref_trans_no = None self._ref_trans_no_type = None self._source = None", "gmt_create(self): return self._gmt_create @gmt_create.setter def gmt_create(self, value): self._gmt_create = value @property def gmt_modified(self):", "else: params['fund_log_id'] = self.fund_log_id if self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else:", "= None self._collect_status 
= None self._collected_amt = None self._creator = None self._freeze_amt =", "= None self._payer_account_name = None self._payer_account_no = None self._payer_bank_branch_name = None self._payer_inst_id =", "= self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] = self.freeze_amt if self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] =", "self._ref_trans_no_type = None self._source = None self._status = None self._tnt_inst_id = None self._used_amt", "d: o.freeze_amt = d['freeze_amt'] if 'fund_log_id' in d: o.fund_log_id = d['fund_log_id'] if 'gl_exchange_rate'", "'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict() else: params['creator'] = self.creator if self.freeze_amt: if hasattr(self.freeze_amt, 'to_alipay_dict'):", "collect_status(self): return self._collect_status @collect_status.setter def collect_status(self, value): self._collect_status = value @property def collected_amt(self):", "self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] = self.payer_ip_role_id if self.receipt_no:", "self.collect_date: if hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict() else: params['collect_date'] = self.collect_date if self.collect_status:", "self.creator: if hasattr(self.creator, 'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict() else: params['creator'] = self.creator if self.freeze_amt:", "self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] = self.ref_trans_no_type if self.source: if hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict()", "payee_account_name(self, value): self._payee_account_name = value @property def payee_account_no(self): return self._payee_account_no @payee_account_no.setter def payee_account_no(self,", "in d: o.payer_account_name = 
d['payer_account_name'] if 'payer_account_no' in d: o.payer_account_no = d['payer_account_no'] if", "return self._payer_account_no @payer_account_no.setter def payer_account_no(self, value): self._payer_account_no = value @property def payer_bank_branch_name(self): return", "params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id'] = self.payee_ip_role_id if self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name']", "return self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self, value): self._writeoff_relative_id = value def to_alipay_dict(self): params =", "self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] = self.bsn_ref_no if self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] = self.business_scene.to_alipay_dict()", "self.status: if hasattr(self.status, 'to_alipay_dict'): params['status'] = self.status.to_alipay_dict() else: params['status'] = self.status if self.tnt_inst_id:", "'to_alipay_dict'): params['collect_amt'] = self.collect_amt.to_alipay_dict() else: params['collect_amt'] = self.collect_amt if self.collect_date: if hasattr(self.collect_date, 'to_alipay_dict'):", "MultiCurrencyMoneyOpenApi): self._collect_amt = value else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self): return self._collect_date", "in d: o.bsn_no = d['bsn_no'] if 'bsn_ref_no' in d: o.bsn_ref_no = d['bsn_ref_no'] if", "= self.bsn_no.to_alipay_dict() else: params['bsn_no'] = self.bsn_no if self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] =", "self._payee_ip_role_id = None self._payer_account_name = None self._payer_account_no = None self._payer_bank_branch_name = None self._payer_inst_id", "@property def collect_date(self): return self._collect_date @collect_date.setter def collect_date(self, value): self._collect_date = 
value @property", "= self.creator if self.freeze_amt: if hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] =", "= value else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter def", "value): self._payer_account_no = value @property def payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self, value):", "in d: o.collect_amt = d['collect_amt'] if 'collect_date' in d: o.collect_date = d['collect_date'] if", "'gl_exchange_rate' in d: o.gl_exchange_rate = d['gl_exchange_rate'] if 'gmt_create' in d: o.gmt_create = d['gmt_create']", "self.collected_amt.to_alipay_dict() else: params['collected_amt'] = self.collected_amt if self.creator: if hasattr(self.creator, 'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict()", "= self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] = self.payer_bank_branch_name if self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] =", "if self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] = self.payee_account_no if", "def payer_account_no(self): return self._payer_account_no @payer_account_no.setter def payer_account_no(self, value): self._payer_account_no = value @property def", "self.channel_memo.to_alipay_dict() else: params['channel_memo'] = self.channel_memo if self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] = self.collect_amt.to_alipay_dict()", "self._gmt_modified = None self._payee_account_name = None self._payee_account_no = None self._payee_inst_id = None self._payee_ip_role_id", "payer_inst_id(self): return 
self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self, value): self._payer_inst_id = value @property def payer_ip_role_id(self):", "self._used_amt = value else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter", "'channel' in d: o.channel = d['channel'] if 'channel_log_no' in d: o.channel_log_no = d['channel_log_no']", "self.writeoff_relative_id return params @staticmethod def from_alipay_dict(d): if not d: return None o =", "'payer_account_no' in d: o.payer_account_no = d['payer_account_no'] if 'payer_bank_branch_name' in d: o.payer_bank_branch_name = d['payer_bank_branch_name']", "in d: o.collect_status = d['collect_status'] if 'collected_amt' in d: o.collected_amt = d['collected_amt'] if", "self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] = self.tnt_inst_id if self.used_amt:", "if hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else: params['channel'] = self.channel if self.channel_log_no: if", "MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no = None self._bsn_ref_no", "return self._freeze_amt @freeze_amt.setter def freeze_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt = value else:", "MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self, value): self._writeoff_relative_id = value", "hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict() else: params['channel_memo'] = self.channel_memo if self.collect_amt: if hasattr(self.collect_amt,", 
"@property def payer_account_name(self): return self._payer_account_name @payer_account_name.setter def payer_account_name(self, value): self._payer_account_name = value @property", "self.collect_status if self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict() else: params['collected_amt'] = self.collected_amt", "= self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] = self.gl_exchange_rate if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] =", "= value @property def collect_status(self): return self._collect_status @collect_status.setter def collect_status(self, value): self._collect_status =", "channel_memo(self): return self._channel_memo @channel_memo.setter def channel_memo(self, value): self._channel_memo = value @property def collect_amt(self):", "= value @property def receipt_no(self): return self._receipt_no @receipt_no.setter def receipt_no(self, value): self._receipt_no =", "if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] = self.payer_inst_id if self.payer_ip_role_id: if", "hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] = self.payer_inst_id if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id,", "in d: o.payee_account_name = d['payee_account_name'] if 'payee_account_no' in d: o.payee_account_no = d['payee_account_no'] if", "'to_alipay_dict'): params['receipt_no'] = self.receipt_no.to_alipay_dict() else: params['receipt_no'] = self.receipt_no if self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'):", "hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] = self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] = self.payer_account_name if self.payer_account_no: if 
hasattr(self.payer_account_no,", "= None self._creator = None self._freeze_amt = None self._fund_log_id = None self._gl_exchange_rate =", "@property def business_scene(self): return self._business_scene @business_scene.setter def business_scene(self, value): self._business_scene = value @property", "@payer_account_no.setter def payer_account_no(self, value): self._payer_account_no = value @property def payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter", "params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] = self.payee_inst_id if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id']", "self._payee_ip_role_id @payee_ip_role_id.setter def payee_ip_role_id(self, value): self._payee_ip_role_id = value @property def payer_account_name(self): return self._payer_account_name", "self._fund_log_id = value @property def gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self, value): self._gl_exchange_rate", "in d: o.ref_trans_no = d['ref_trans_no'] if 'ref_trans_no_type' in d: o.ref_trans_no_type = d['ref_trans_no_type'] if", "source(self, value): self._source = value @property def status(self): return self._status @status.setter def status(self,", "self._payer_ip_role_id = None self._receipt_no = None self._ref_trans_no = None self._ref_trans_no_type = None self._source", "self._payer_account_name @payer_account_name.setter def payer_account_name(self, value): self._payer_account_name = value @property def payer_account_no(self): return self._payer_account_no", "= value @property def payer_account_name(self): return self._payer_account_name @payer_account_name.setter def payer_account_name(self, value): self._payer_account_name =", "self._collected_amt = None self._creator = None self._freeze_amt = None self._fund_log_id = None self._gl_exchange_rate", "@freeze_amt.setter def freeze_amt(self, 
value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt = value else: self._freeze_amt =", "else: params['payer_account_name'] = self.payer_account_name if self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict() else:", "= d['creator'] if 'freeze_amt' in d: o.freeze_amt = d['freeze_amt'] if 'fund_log_id' in d:", "hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no'] = self.ref_trans_no if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type,", "None self._tnt_inst_id = None self._used_amt = None self._writeoff_relative_id = None @property def bsn_no(self):", "d: o.payee_ip_role_id = d['payee_ip_role_id'] if 'payer_account_name' in d: o.payer_account_name = d['payer_account_name'] if 'payer_account_no'", "self._collect_date @collect_date.setter def collect_date(self, value): self._collect_date = value @property def collect_status(self): return self._collect_status", "in d: o.gl_exchange_rate = d['gl_exchange_rate'] if 'gmt_create' in d: o.gmt_create = d['gmt_create'] if", "writeoff_relative_id(self, value): self._writeoff_relative_id = value def to_alipay_dict(self): params = dict() if self.bsn_no: if", "@business_scene.setter def business_scene(self, value): self._business_scene = value @property def channel(self): return self._channel @channel.setter", "= self.collect_amt.to_alipay_dict() else: params['collect_amt'] = self.collect_amt if self.collect_date: if hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] =", "o.payee_account_no = d['payee_account_no'] if 'payee_inst_id' in d: o.payee_inst_id = d['payee_inst_id'] if 'payee_ip_role_id' in", "self._bsn_ref_no @bsn_ref_no.setter def bsn_ref_no(self, value): self._bsn_ref_no = value @property def business_scene(self): return self._business_scene", "return self._channel @channel.setter def channel(self, 
value): self._channel = value @property def channel_log_no(self): return", "if 'payer_ip_role_id' in d: o.payer_ip_role_id = d['payer_ip_role_id'] if 'receipt_no' in d: o.receipt_no =", "o.ref_trans_no_type = d['ref_trans_no_type'] if 'source' in d: o.source = d['source'] if 'status' in", "'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] = self.payee_account_name if self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'):", "if 'used_amt' in d: o.used_amt = d['used_amt'] if 'writeoff_relative_id' in d: o.writeoff_relative_id =", "def source(self, value): self._source = value @property def status(self): return self._status @status.setter def", "'to_alipay_dict'): params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] = self.payer_bank_branch_name if self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'):", "value @property def business_scene(self): return self._business_scene @business_scene.setter def business_scene(self, value): self._business_scene = value", "-*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import", "self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] = self.writeoff_relative_id return params @staticmethod def from_alipay_dict(d): if not d:", "= None self._receipt_no = None self._ref_trans_no = None self._ref_trans_no_type = None self._source =", "def status(self, value): self._status = value @property def tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter def", "self._gmt_modified = value @property def payee_account_name(self): return self._payee_account_name @payee_account_name.setter def payee_account_name(self, value): self._payee_account_name", "o.payer_ip_role_id = d['payer_ip_role_id'] if 'receipt_no' in d: o.receipt_no = d['receipt_no'] 
if 'ref_trans_no' in", "self._channel @channel.setter def channel(self, value): self._channel = value @property def channel_log_no(self): return self._channel_log_no", "= self.collect_amt if self.collect_date: if hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict() else: params['collect_date'] =", "@property def channel(self): return self._channel @channel.setter def channel(self, value): self._channel = value @property", "if self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no'] = self.ref_trans_no if", "return self._collect_amt @collect_amt.setter def collect_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt = value else:", "value): self._channel = value @property def channel_log_no(self): return self._channel_log_no @channel_log_no.setter def channel_log_no(self, value):", "self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] = self.channel_log_no if self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict()", "self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] = self.freeze_amt if self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict()", "def collected_amt(self): return self._collected_amt @collected_amt.setter def collected_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt =", "params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified']", "o.tnt_inst_id = d['tnt_inst_id'] if 'used_amt' in d: o.used_amt = d['used_amt'] if 'writeoff_relative_id' in", "def bsn_no(self, value): self._bsn_no = value @property def bsn_ref_no(self): return 
self._bsn_ref_no @bsn_ref_no.setter def", "= value @property def payer_inst_id(self): return self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self, value): self._payer_inst_id =", "in d: o.used_amt = d['used_amt'] if 'writeoff_relative_id' in d: o.writeoff_relative_id = d['writeoff_relative_id'] return", "gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self, value): self._gl_exchange_rate = value @property def gmt_create(self):", "params['collect_status'] = self.collect_status if self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict() else: params['collected_amt']", "self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] = self.gl_exchange_rate if self.gmt_create:", "self.payee_account_no if self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] = self.payee_inst_id", "self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] = self.payee_inst_id if self.payee_ip_role_id:", "params['payer_account_name'] = self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] = self.payer_account_name if self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no']", "= None self._channel = None self._channel_log_no = None self._channel_memo = None self._collect_amt =", "in d: o.payee_inst_id = d['payee_inst_id'] if 'payee_ip_role_id' in d: o.payee_ip_role_id = d['payee_ip_role_id'] if", "self._channel_memo @channel_memo.setter def channel_memo(self, value): self._channel_memo = value @property def collect_amt(self): return self._collect_amt", "def 
receipt_no(self): return self._receipt_no @receipt_no.setter def receipt_no(self, value): self._receipt_no = value @property def", "d: return None o = CollectReceiptOpenApiDTO() if 'bsn_no' in d: o.bsn_no = d['bsn_no']", "d: o.payee_account_no = d['payee_account_no'] if 'payee_inst_id' in d: o.payee_inst_id = d['payee_inst_id'] if 'payee_ip_role_id'", "d['payer_bank_branch_name'] if 'payer_inst_id' in d: o.payer_inst_id = d['payer_inst_id'] if 'payer_ip_role_id' in d: o.payer_ip_role_id", "self.payer_account_no if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] = self.payer_bank_branch_name", "@property def tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self, value): self._tnt_inst_id = value @property", "value): self._bsn_no = value @property def bsn_ref_no(self): return self._bsn_ref_no @bsn_ref_no.setter def bsn_ref_no(self, value):", "d: o.channel_log_no = d['channel_log_no'] if 'channel_memo' in d: o.channel_memo = d['channel_memo'] if 'collect_amt'", "if 'status' in d: o.status = d['status'] if 'tnt_inst_id' in d: o.tnt_inst_id =", "= None self._bsn_ref_no = None self._business_scene = None self._channel = None self._channel_log_no =", "else: params['payee_ip_role_id'] = self.payee_ip_role_id if self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] = self.payer_account_name.to_alipay_dict() else:", "@property def ref_trans_no_type(self): return self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self, value): self._ref_trans_no_type = value @property", "= self.payee_account_name if self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] =", "= self.bsn_ref_no if 
self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] = self.business_scene.to_alipay_dict() else: params['business_scene'] =", "value): self._gmt_modified = value @property def payee_account_name(self): return self._payee_account_name @payee_account_name.setter def payee_account_name(self, value):", "ref_trans_no_type(self): return self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self, value): self._ref_trans_no_type = value @property def source(self):", "= self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified if self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] =", "def payer_inst_id(self, value): self._payer_inst_id = value @property def payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter def", "params['source'] = self.source if self.status: if hasattr(self.status, 'to_alipay_dict'): params['status'] = self.status.to_alipay_dict() else: params['status']", "value @property def status(self): return self._status @status.setter def status(self, value): self._status = value", "if 'collected_amt' in d: o.collected_amt = d['collected_amt'] if 'creator' in d: o.creator =", "def collect_amt(self): return self._collect_amt @collect_amt.setter def collect_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt =", "if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt = value else: self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self):", "None self._creator = None self._freeze_amt = None self._fund_log_id = None self._gl_exchange_rate = None", "d: o.status = d['status'] if 'tnt_inst_id' in d: o.tnt_inst_id = d['tnt_inst_id'] if 'used_amt'", "d['tnt_inst_id'] if 'used_amt' in d: o.used_amt = d['used_amt'] if 'writeoff_relative_id' in d: o.writeoff_relative_id", "def bsn_no(self): return self._bsn_no 
@bsn_no.setter def bsn_no(self, value): self._bsn_no = value @property def", "value @property def payer_inst_id(self): return self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self, value): self._payer_inst_id = value", "self.used_amt.to_alipay_dict() else: params['used_amt'] = self.used_amt if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict()", "in d: o.channel_log_no = d['channel_log_no'] if 'channel_memo' in d: o.channel_memo = d['channel_memo'] if", "params['status'] = self.status if self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id']", "hasattr(self.creator, 'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict() else: params['creator'] = self.creator if self.freeze_amt: if hasattr(self.freeze_amt,", "d['status'] if 'tnt_inst_id' in d: o.tnt_inst_id = d['tnt_inst_id'] if 'used_amt' in d: o.used_amt", "value @property def bsn_ref_no(self): return self._bsn_ref_no @bsn_ref_no.setter def bsn_ref_no(self, value): self._bsn_ref_no = value", "@property def payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter def payee_ip_role_id(self, value): self._payee_ip_role_id = value @property", "hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] = self.payee_inst_id if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id,", "'payee_account_no' in d: o.payee_account_no = d['payee_account_no'] if 'payee_inst_id' in d: o.payee_inst_id = d['payee_inst_id']", "if self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] = self.payee_account_name if", "alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import 
MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi", "= None self._payee_account_name = None self._payee_account_no = None self._payee_inst_id = None self._payee_ip_role_id =", "= None self._payer_bank_branch_name = None self._payer_inst_id = None self._payer_ip_role_id = None self._receipt_no =", "'source' in d: o.source = d['source'] if 'status' in d: o.status = d['status']", "'payee_inst_id' in d: o.payee_inst_id = d['payee_inst_id'] if 'payee_ip_role_id' in d: o.payee_ip_role_id = d['payee_ip_role_id']", "payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter def payee_ip_role_id(self, value): self._payee_ip_role_id = value @property def payer_account_name(self):", "if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict() else: params['payer_account_no'] = self.payer_account_no if self.payer_bank_branch_name: if", "hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] = self.gl_exchange_rate if self.gmt_create: if hasattr(self.gmt_create,", "= value @property def freeze_amt(self): return self._freeze_amt @freeze_amt.setter def freeze_amt(self, value): if isinstance(value,", "else: params['payee_account_no'] = self.payee_account_no if self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else:", "o.gmt_create = d['gmt_create'] if 'gmt_modified' in d: o.gmt_modified = d['gmt_modified'] if 'payee_account_name' in", "self._receipt_no @receipt_no.setter def receipt_no(self, value): self._receipt_no = value @property def ref_trans_no(self): return self._ref_trans_no", "return self._payee_account_name 
@payee_account_name.setter def payee_account_name(self, value): self._payee_account_name = value @property def payee_account_no(self): return", "= self.channel_log_no if self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict() else: params['channel_memo'] =", "params['writeoff_relative_id'] = self.writeoff_relative_id return params @staticmethod def from_alipay_dict(d): if not d: return None", "def gl_exchange_rate(self, value): self._gl_exchange_rate = value @property def gmt_create(self): return self._gmt_create @gmt_create.setter def", "self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] = self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] = self.payer_account_name if self.payer_account_no:", "self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] = self.payer_inst_id if self.payer_ip_role_id:", "self.channel_log_no if self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict() else: params['channel_memo'] = self.channel_memo", "value): self._collect_status = value @property def collected_amt(self): return self._collected_amt @collected_amt.setter def collected_amt(self, value):", "= self.channel_memo.to_alipay_dict() else: params['channel_memo'] = self.channel_memo if self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] =", "if 'gl_exchange_rate' in d: o.gl_exchange_rate = d['gl_exchange_rate'] if 'gmt_create' in d: o.gmt_create =", "MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no = None self._bsn_ref_no = None self._business_scene =", "self.payer_inst_id if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): 
params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] = self.payer_ip_role_id", "if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] = self.collect_amt.to_alipay_dict() else: params['collect_amt'] = self.collect_amt if self.collect_date: if", "'ref_trans_no_type' in d: o.ref_trans_no_type = d['ref_trans_no_type'] if 'source' in d: o.source = d['source']", "o.source = d['source'] if 'status' in d: o.status = d['status'] if 'tnt_inst_id' in", "'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict() else: params['collect_date'] = self.collect_date if self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'):", "= d['status'] if 'tnt_inst_id' in d: o.tnt_inst_id = d['tnt_inst_id'] if 'used_amt' in d:", "None self._source = None self._status = None self._tnt_inst_id = None self._used_amt = None", "else: params['collect_amt'] = self.collect_amt if self.collect_date: if hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict() else:", "@creator.setter def creator(self, value): self._creator = value @property def freeze_amt(self): return self._freeze_amt @freeze_amt.setter", "def gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self, value): self._gl_exchange_rate = value @property def", "self.payer_account_no.to_alipay_dict() else: params['payer_account_no'] = self.payer_account_no if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict()", "d['payee_account_name'] if 'payee_account_no' in d: o.payee_account_no = d['payee_account_no'] if 'payee_inst_id' in d: o.payee_inst_id", "None self._payee_inst_id = None self._payee_ip_role_id = None self._payer_account_name = None self._payer_account_no = None", "d['payer_account_name'] if 'payer_account_no' in d: 
o.payer_account_no = d['payer_account_no'] if 'payer_bank_branch_name' in d: o.payer_bank_branch_name", "* from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi", "@property def payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self, value): self._payer_ip_role_id = value @property", "= self.gl_exchange_rate if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create'] =", "in d: o.business_scene = d['business_scene'] if 'channel' in d: o.channel = d['channel'] if", "'status' in d: o.status = d['status'] if 'tnt_inst_id' in d: o.tnt_inst_id = d['tnt_inst_id']", "hasattr(self.status, 'to_alipay_dict'): params['status'] = self.status.to_alipay_dict() else: params['status'] = self.status if self.tnt_inst_id: if hasattr(self.tnt_inst_id,", "= None self._gmt_modified = None self._payee_account_name = None self._payee_account_no = None self._payee_inst_id =", "= d['ref_trans_no_type'] if 'source' in d: o.source = d['source'] if 'status' in d:", "params['bsn_no'] = self.bsn_no if self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no']", "@property def payee_account_no(self): return self._payee_account_no @payee_account_no.setter def payee_account_no(self, value): self._payee_account_no = value @property", "'collected_amt' in d: o.collected_amt = d['collected_amt'] if 'creator' in d: o.creator = d['creator']", "value): self._ref_trans_no_type = value @property def source(self): return self._source @source.setter def source(self, value):", "None self._ref_trans_no = None self._ref_trans_no_type = None self._source = None self._status = None", "in d: 
o.payer_inst_id = d['payer_inst_id'] if 'payer_ip_role_id' in d: o.payer_ip_role_id = d['payer_ip_role_id'] if", "o.receipt_no = d['receipt_no'] if 'ref_trans_no' in d: o.ref_trans_no = d['ref_trans_no'] if 'ref_trans_no_type' in", "return self._collected_amt @collected_amt.setter def collected_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt = value else:", "self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] = self.collect_amt.to_alipay_dict() else: params['collect_amt'] = self.collect_amt if self.collect_date:", "= self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] = self.bsn_ref_no if self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] =", "if self.creator: if hasattr(self.creator, 'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict() else: params['creator'] = self.creator if", "collected_amt(self): return self._collected_amt @collected_amt.setter def collected_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt = value", "hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified,", "@staticmethod def from_alipay_dict(d): if not d: return None o = CollectReceiptOpenApiDTO() if 'bsn_no'", "receipt_no(self): return self._receipt_no @receipt_no.setter def receipt_no(self, value): self._receipt_no = value @property def ref_trans_no(self):", "bsn_ref_no(self, value): self._bsn_ref_no = value @property def business_scene(self): return self._business_scene @business_scene.setter def business_scene(self,", "= None self._payee_ip_role_id = None self._payer_account_name = None self._payer_account_no = None self._payer_bank_branch_name =", "value @property def payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter def 
payee_ip_role_id(self, value): self._payee_ip_role_id = value", "d['channel'] if 'channel_log_no' in d: o.channel_log_no = d['channel_log_no'] if 'channel_memo' in d: o.channel_memo", "= d['fund_log_id'] if 'gl_exchange_rate' in d: o.gl_exchange_rate = d['gl_exchange_rate'] if 'gmt_create' in d:", "return self._channel_memo @channel_memo.setter def channel_memo(self, value): self._channel_memo = value @property def collect_amt(self): return", "None self._payer_account_name = None self._payer_account_no = None self._payer_bank_branch_name = None self._payer_inst_id = None", "= MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self): return self._fund_log_id @fund_log_id.setter def fund_log_id(self, value): self._fund_log_id =", "value): self._source = value @property def status(self): return self._status @status.setter def status(self, value):", "d['creator'] if 'freeze_amt' in d: o.freeze_amt = d['freeze_amt'] if 'fund_log_id' in d: o.fund_log_id", "o.gl_exchange_rate = d['gl_exchange_rate'] if 'gmt_create' in d: o.gmt_create = d['gmt_create'] if 'gmt_modified' in", "if self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict() else: params['payer_account_no'] = self.payer_account_no if", "self._freeze_amt @freeze_amt.setter def freeze_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt = value else: self._freeze_amt", "= self.ref_trans_no_type if self.source: if hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() else: params['source'] =", "-*- import json from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi", "params['ref_trans_no'] = self.ref_trans_no if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'): 
params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type']", "isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt = value else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def creator(self): return", "self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id'] = self.payee_ip_role_id if self.payer_account_name:", "params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] = self.gl_exchange_rate if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create']", "= self.gmt_modified if self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] =", "= d['gmt_modified'] if 'payee_account_name' in d: o.payee_account_name = d['payee_account_name'] if 'payee_account_no' in d:", "'collect_status' in d: o.collect_status = d['collect_status'] if 'collected_amt' in d: o.collected_amt = d['collected_amt']", "in d: o.bsn_ref_no = d['bsn_ref_no'] if 'business_scene' in d: o.business_scene = d['business_scene'] if", "'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] = self.freeze_amt if self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'):", "= self.freeze_amt if self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] =", "d: o.collect_date = d['collect_date'] if 'collect_status' in d: o.collect_status = d['collect_status'] if 'collected_amt'", "ref_trans_no_type(self, value): self._ref_trans_no_type = value @property def source(self): return self._source @source.setter def source(self,", "if 
hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] = self.ref_trans_no_type if self.source: if", "self._writeoff_relative_id = value def to_alipay_dict(self): params = dict() if self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'):", "def gmt_create(self, value): self._gmt_create = value @property def gmt_modified(self): return self._gmt_modified @gmt_modified.setter def", "MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi", "fund_log_id(self, value): self._fund_log_id = value @property def gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self,", "MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self): return self._collect_date @collect_date.setter def collect_date(self, value): self._collect_date = value", "self.bsn_no if self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] = self.bsn_ref_no", "class CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no = None self._bsn_ref_no = None self._business_scene = None", "d: o.ref_trans_no_type = d['ref_trans_no_type'] if 'source' in d: o.source = d['source'] if 'status'", "return self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self, value): self._tnt_inst_id = value @property def used_amt(self): return", "else: params['writeoff_relative_id'] = self.writeoff_relative_id return params @staticmethod def from_alipay_dict(d): if not d: return", "value @property def payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self, value): self._payer_ip_role_id = 
value", "self._channel_log_no @channel_log_no.setter def channel_log_no(self, value): self._channel_log_no = value @property def channel_memo(self): return self._channel_memo", "hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id'] = self.payee_ip_role_id if self.payer_account_name: if hasattr(self.payer_account_name,", "'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] = self.writeoff_relative_id return params @staticmethod def from_alipay_dict(d):", "value @property def ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter def ref_trans_no(self, value): self._ref_trans_no = value", "params['ref_trans_no_type'] = self.ref_trans_no_type if self.source: if hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() else: params['source']", "hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() else: params['source'] = self.source if self.status: if hasattr(self.status,", "in d: o.fund_log_id = d['fund_log_id'] if 'gl_exchange_rate' in d: o.gl_exchange_rate = d['gl_exchange_rate'] if", "if self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else: params['channel'] = self.channel if", "self.payee_inst_id if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id'] = self.payee_ip_role_id", "from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class", "@channel_memo.setter def channel_memo(self, value): self._channel_memo = value @property def 
collect_amt(self): return self._collect_amt @collect_amt.setter", "return self._ref_trans_no @ref_trans_no.setter def ref_trans_no(self, value): self._ref_trans_no = value @property def ref_trans_no_type(self): return", "'channel_memo' in d: o.channel_memo = d['channel_memo'] if 'collect_amt' in d: o.collect_amt = d['collect_amt']", "return self._collect_status @collect_status.setter def collect_status(self, value): self._collect_status = value @property def collected_amt(self): return", "value): self._ref_trans_no = value @property def ref_trans_no_type(self): return self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self, value):", "return self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self, value): self._ref_trans_no_type = value @property def source(self): return", "if 'channel_log_no' in d: o.channel_log_no = d['channel_log_no'] if 'channel_memo' in d: o.channel_memo =", "in d: o.payer_account_no = d['payer_account_no'] if 'payer_bank_branch_name' in d: o.payer_bank_branch_name = d['payer_bank_branch_name'] if", "= value else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def creator(self): return self._creator @creator.setter def", "if self.collect_date: if hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict() else: params['collect_date'] = self.collect_date if", "o.fund_log_id = d['fund_log_id'] if 'gl_exchange_rate' in d: o.gl_exchange_rate = d['gl_exchange_rate'] if 'gmt_create' in", "if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] = self.fund_log_id if self.gl_exchange_rate: if", "= d['payer_bank_branch_name'] if 'payer_inst_id' in d: o.payer_inst_id = d['payer_inst_id'] if 'payer_ip_role_id' in d:", "'payer_ip_role_id' in d: o.payer_ip_role_id = d['payer_ip_role_id'] if 'receipt_no' in d: o.receipt_no = d['receipt_no']", "if not d: return 
None o = CollectReceiptOpenApiDTO() if 'bsn_no' in d: o.bsn_no", "self._payee_account_no = value @property def payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self, value): self._payee_inst_id", "@payer_account_name.setter def payer_account_name(self, value): self._payer_account_name = value @property def payer_account_no(self): return self._payer_account_no @payer_account_no.setter", "self._collect_amt = value else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self): return self._collect_date @collect_date.setter", "= None self._writeoff_relative_id = None @property def bsn_no(self): return self._bsn_no @bsn_no.setter def bsn_no(self,", "= self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] = self.payee_account_no if self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] =", "hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] = self.ref_trans_no_type if self.source: if hasattr(self.source,", "def payee_ip_role_id(self, value): self._payee_ip_role_id = value @property def payer_account_name(self): return self._payer_account_name @payer_account_name.setter def", "self.collect_date if self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] = self.collect_status.to_alipay_dict() else: params['collect_status'] = self.collect_status", "'to_alipay_dict'): params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] = self.payer_ip_role_id if self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'):", "return self._payee_ip_role_id @payee_ip_role_id.setter def payee_ip_role_id(self, value): self._payee_ip_role_id = value @property def payer_account_name(self): return", "'bsn_ref_no' in d: o.bsn_ref_no = d['bsn_ref_no'] if 
'business_scene' in d: o.business_scene = d['business_scene']", "d: o.gmt_create = d['gmt_create'] if 'gmt_modified' in d: o.gmt_modified = d['gmt_modified'] if 'payee_account_name'", "None self._payer_inst_id = None self._payer_ip_role_id = None self._receipt_no = None self._ref_trans_no = None", "= value else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self): return self._collect_date @collect_date.setter def", "def ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter def ref_trans_no(self, value): self._ref_trans_no = value @property def", "params['creator'] = self.creator if self.freeze_amt: if hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else: params['freeze_amt']", "self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict() else: params['collected_amt'] = self.collected_amt if self.creator:", "'collect_amt' in d: o.collect_amt = d['collect_amt'] if 'collect_date' in d: o.collect_date = d['collect_date']", "hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else: params['channel'] = self.channel if self.channel_log_no: if hasattr(self.channel_log_no,", "value @property def used_amt(self): return self._used_amt @used_amt.setter def used_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi):", "def channel_log_no(self): return self._channel_log_no @channel_log_no.setter def channel_log_no(self, value): self._channel_log_no = value @property def", "= value @property def tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self, value): self._tnt_inst_id =", "def channel_memo(self, value): self._channel_memo = value @property def collect_amt(self): return self._collect_amt @collect_amt.setter def", "in d: o.tnt_inst_id = d['tnt_inst_id'] if 'used_amt' in d: o.used_amt = d['used_amt'] if", "= 
value @property def payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter def payee_ip_role_id(self, value): self._payee_ip_role_id =", "# -*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi", "= self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] = self.writeoff_relative_id return params @staticmethod def from_alipay_dict(d): if not", "params['channel_log_no'] = self.channel_log_no if self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict() else: params['channel_memo']", "= self.business_scene.to_alipay_dict() else: params['business_scene'] = self.business_scene if self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel'] =", "else: params['used_amt'] = self.used_amt if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else:", "@property def creator(self): return self._creator @creator.setter def creator(self, value): self._creator = value @property", "hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] = self.payer_bank_branch_name if self.payer_inst_id: if hasattr(self.payer_inst_id,", "payer_account_no(self): return self._payer_account_no @payer_account_no.setter def payer_account_no(self, value): self._payer_account_no = value @property def payer_bank_branch_name(self):", "'tnt_inst_id' in d: o.tnt_inst_id = d['tnt_inst_id'] if 'used_amt' in d: o.used_amt = d['used_amt']", "self._payer_inst_id = value @property def payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self, value): self._payer_ip_role_id", "= self.source.to_alipay_dict() else: params['source'] 
= self.source if self.status: if hasattr(self.status, 'to_alipay_dict'): params['status'] =", "in d: o.collected_amt = d['collected_amt'] if 'creator' in d: o.creator = d['creator'] if", "payer_account_name(self): return self._payer_account_name @payer_account_name.setter def payer_account_name(self, value): self._payer_account_name = value @property def payer_account_no(self):", "else: params['status'] = self.status if self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else:", "@property def collect_status(self): return self._collect_status @collect_status.setter def collect_status(self, value): self._collect_status = value @property", "= self.channel_memo if self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] = self.collect_amt.to_alipay_dict() else: params['collect_amt'] =", "params['creator'] = self.creator.to_alipay_dict() else: params['creator'] = self.creator if self.freeze_amt: if hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt']", "@payee_ip_role_id.setter def payee_ip_role_id(self, value): self._payee_ip_role_id = value @property def payer_account_name(self): return self._payer_account_name @payer_account_name.setter", "= MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self, value): self._writeoff_relative_id =", "self._status = value @property def tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self, value): self._tnt_inst_id", "import json from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import", "def payer_ip_role_id(self, value): self._payer_ip_role_id = value @property def receipt_no(self): return self._receipt_no 
@receipt_no.setter def", "params['payee_inst_id'] = self.payee_inst_id if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id']", "d['collected_amt'] if 'creator' in d: o.creator = d['creator'] if 'freeze_amt' in d: o.freeze_amt", "d['gmt_modified'] if 'payee_account_name' in d: o.payee_account_name = d['payee_account_name'] if 'payee_account_no' in d: o.payee_account_no", "= None self._payer_inst_id = None self._payer_ip_role_id = None self._receipt_no = None self._ref_trans_no =", "if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] = self.ref_trans_no_type if", "self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] = self.writeoff_relative_id return params", "value): self._business_scene = value @property def channel(self): return self._channel @channel.setter def channel(self, value):", "hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] = self.freeze_amt if self.fund_log_id: if hasattr(self.fund_log_id,", "payee_account_no(self, value): self._payee_account_no = value @property def payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self,", "o.payer_inst_id = d['payer_inst_id'] if 'payer_ip_role_id' in d: o.payer_ip_role_id = d['payer_ip_role_id'] if 'receipt_no' in", "= self.receipt_no.to_alipay_dict() else: params['receipt_no'] = self.receipt_no if self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] =", "payer_bank_branch_name(self, value): self._payer_bank_branch_name = value @property def payer_inst_id(self): return 
self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self,", "if self.status: if hasattr(self.status, 'to_alipay_dict'): params['status'] = self.status.to_alipay_dict() else: params['status'] = self.status if", "= CollectReceiptOpenApiDTO() if 'bsn_no' in d: o.bsn_no = d['bsn_no'] if 'bsn_ref_no' in d:", "if 'ref_trans_no_type' in d: o.ref_trans_no_type = d['ref_trans_no_type'] if 'source' in d: o.source =", "self._receipt_no = value @property def ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter def ref_trans_no(self, value): self._ref_trans_no", "freeze_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt = value else: self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property", "= self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] = self.channel_log_no if self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] =", "hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] = self.payer_ip_role_id if self.receipt_no: if hasattr(self.receipt_no,", "return self._payee_account_no @payee_account_no.setter def payee_account_no(self, value): self._payee_account_no = value @property def payee_inst_id(self): return", "params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] = self.payee_account_no if self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id']", "= self.status if self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] =", "'freeze_amt' in d: o.freeze_amt = d['freeze_amt'] if 'fund_log_id' in d: o.fund_log_id = d['fund_log_id']", "if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] 
= self.payee_inst_id if self.payee_ip_role_id: if", "@property def source(self): return self._source @source.setter def source(self, value): self._source = value @property", "params['business_scene'] = self.business_scene.to_alipay_dict() else: params['business_scene'] = self.business_scene if self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel']", "'business_scene' in d: o.business_scene = d['business_scene'] if 'channel' in d: o.channel = d['channel']", "'channel_log_no' in d: o.channel_log_no = d['channel_log_no'] if 'channel_memo' in d: o.channel_memo = d['channel_memo']", "= d['tnt_inst_id'] if 'used_amt' in d: o.used_amt = d['used_amt'] if 'writeoff_relative_id' in d:", "return self._channel_log_no @channel_log_no.setter def channel_log_no(self, value): self._channel_log_no = value @property def channel_memo(self): return", "'bsn_no' in d: o.bsn_no = d['bsn_no'] if 'bsn_ref_no' in d: o.bsn_ref_no = d['bsn_ref_no']", "o.payee_ip_role_id = d['payee_ip_role_id'] if 'payer_account_name' in d: o.payer_account_name = d['payer_account_name'] if 'payer_account_no' in", "self._channel_memo = value @property def collect_amt(self): return self._collect_amt @collect_amt.setter def collect_amt(self, value): if", "self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified if self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict()", "params['channel'] = self.channel if self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] = self.channel_log_no.to_alipay_dict() else: params['channel_log_no']", "params['payer_bank_branch_name'] = self.payer_bank_branch_name if self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id']", "ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter def 
ref_trans_no(self, value): self._ref_trans_no = value @property def ref_trans_no_type(self):", "= value @property def channel_log_no(self): return self._channel_log_no @channel_log_no.setter def channel_log_no(self, value): self._channel_log_no =", "value): self._payee_inst_id = value @property def payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter def payee_ip_role_id(self, value):", "def ref_trans_no_type(self, value): self._ref_trans_no_type = value @property def source(self): return self._source @source.setter def", "@tnt_inst_id.setter def tnt_inst_id(self, value): self._tnt_inst_id = value @property def used_amt(self): return self._used_amt @used_amt.setter", "value): self._payee_ip_role_id = value @property def payer_account_name(self): return self._payer_account_name @payer_account_name.setter def payer_account_name(self, value):", "if 'payee_inst_id' in d: o.payee_inst_id = d['payee_inst_id'] if 'payee_ip_role_id' in d: o.payee_ip_role_id =", "'to_alipay_dict'): params['used_amt'] = self.used_amt.to_alipay_dict() else: params['used_amt'] = self.used_amt if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'):", "else: params['gmt_create'] = self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else:", "self._channel = None self._channel_log_no = None self._channel_memo = None self._collect_amt = None self._collect_date", "else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self): return self._collect_date @collect_date.setter def collect_date(self, value):", "def freeze_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt = value else: self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value)", "params['collected_amt'] = self.collected_amt.to_alipay_dict() else: params['collected_amt'] = self.collected_amt if 
self.creator: if hasattr(self.creator, 'to_alipay_dict'): params['creator']", "if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create if self.gmt_modified: if", "d: o.payee_inst_id = d['payee_inst_id'] if 'payee_ip_role_id' in d: o.payee_ip_role_id = d['payee_ip_role_id'] if 'payer_account_name'", "None self._payee_account_no = None self._payee_inst_id = None self._payee_ip_role_id = None self._payer_account_name = None", "'ref_trans_no' in d: o.ref_trans_no = d['ref_trans_no'] if 'ref_trans_no_type' in d: o.ref_trans_no_type = d['ref_trans_no_type']", "@writeoff_relative_id.setter def writeoff_relative_id(self, value): self._writeoff_relative_id = value def to_alipay_dict(self): params = dict() if", "if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id'] = self.payee_ip_role_id if self.payer_account_name: if", "d['collect_amt'] if 'collect_date' in d: o.collect_date = d['collect_date'] if 'collect_status' in d: o.collect_status", "@payee_account_no.setter def payee_account_no(self, value): self._payee_account_no = value @property def payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter", "self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict() else: params['channel_memo'] = self.channel_memo if self.collect_amt:", "= d['freeze_amt'] if 'fund_log_id' in d: o.fund_log_id = d['fund_log_id'] if 'gl_exchange_rate' in d:", "@gmt_create.setter def gmt_create(self, value): self._gmt_create = value @property def gmt_modified(self): return self._gmt_modified @gmt_modified.setter", "d['gmt_create'] if 'gmt_modified' in d: o.gmt_modified = d['gmt_modified'] if 'payee_account_name' in d: o.payee_account_name", "@ref_trans_no_type.setter def ref_trans_no_type(self, value): self._ref_trans_no_type = 
value @property def source(self): return self._source @source.setter", "from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no = None self._bsn_ref_no =", "def freeze_amt(self): return self._freeze_amt @freeze_amt.setter def freeze_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt =", "def used_amt(self): return self._used_amt @used_amt.setter def used_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt =", "if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] = self.payer_ip_role_id if self.receipt_no: if", "= self.tnt_inst_id if self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] = self.used_amt.to_alipay_dict() else: params['used_amt'] =", "if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] = self.writeoff_relative_id return", "if 'payer_inst_id' in d: o.payer_inst_id = d['payer_inst_id'] if 'payer_ip_role_id' in d: o.payer_ip_role_id =", "def collect_status(self): return self._collect_status @collect_status.setter def collect_status(self, value): self._collect_status = value @property def", "'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict() else: params['payer_account_no'] = self.payer_account_no if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'):", "if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt = value else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self):", "return self._payer_account_name @payer_account_name.setter def payer_account_name(self, value): 
self._payer_account_name = value @property def payer_account_no(self): return", "if 'freeze_amt' in d: o.freeze_amt = d['freeze_amt'] if 'fund_log_id' in d: o.fund_log_id =", "self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] = self.tnt_inst_id if self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] = self.used_amt.to_alipay_dict()", "params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] = self.payee_account_name if self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no']", "self._payer_account_no @payer_account_no.setter def payer_account_no(self, value): self._payer_account_no = value @property def payer_bank_branch_name(self): return self._payer_bank_branch_name", "self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self, value): self._writeoff_relative_id = value def to_alipay_dict(self): params = dict()", "params['tnt_inst_id'] = self.tnt_inst_id if self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] = self.used_amt.to_alipay_dict() else: params['used_amt']", "o.channel = d['channel'] if 'channel_log_no' in d: o.channel_log_no = d['channel_log_no'] if 'channel_memo' in", "def channel_memo(self): return self._channel_memo @channel_memo.setter def channel_memo(self, value): self._channel_memo = value @property def", "@payee_account_name.setter def payee_account_name(self, value): self._payee_account_name = value @property def payee_account_no(self): return self._payee_account_no @payee_account_no.setter", "= value @property def ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter def ref_trans_no(self, value): self._ref_trans_no =", "= self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] =", "self._bsn_ref_no = value @property def 
business_scene(self): return self._business_scene @business_scene.setter def business_scene(self, value): self._business_scene", "params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no'] = self.ref_trans_no if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type']", "else: params['gmt_modified'] = self.gmt_modified if self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else:", "= self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] = self.fund_log_id if self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate'] =", "not d: return None o = CollectReceiptOpenApiDTO() if 'bsn_no' in d: o.bsn_no =", "if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified if", "= d['business_scene'] if 'channel' in d: o.channel = d['channel'] if 'channel_log_no' in d:", "return self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self, value): self._payee_inst_id = value @property def payee_ip_role_id(self): return", "= self.collected_amt if self.creator: if hasattr(self.creator, 'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict() else: params['creator'] =", "'payee_ip_role_id' in d: o.payee_ip_role_id = d['payee_ip_role_id'] if 'payer_account_name' in d: o.payer_account_name = d['payer_account_name']", "if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] = self.payee_account_no if self.payee_inst_id: if", "d: o.payer_ip_role_id = d['payer_ip_role_id'] if 'receipt_no' in d: o.receipt_no = d['receipt_no'] if 'ref_trans_no'", "self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] = 
self.used_amt.to_alipay_dict() else: params['used_amt'] = self.used_amt if self.writeoff_relative_id:", "= None self._gmt_create = None self._gmt_modified = None self._payee_account_name = None self._payee_account_no =", "self._gmt_create = None self._gmt_modified = None self._payee_account_name = None self._payee_account_no = None self._payee_inst_id", "params['collect_amt'] = self.collect_amt if self.collect_date: if hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict() else: params['collect_date']", "hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict() else: params['collect_date'] = self.collect_date if self.collect_status: if hasattr(self.collect_status,", "self._payer_inst_id = None self._payer_ip_role_id = None self._receipt_no = None self._ref_trans_no = None self._ref_trans_no_type", "= self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] = self.payer_account_name if self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] =", "self.payer_account_name.to_alipay_dict() else: params['payer_account_name'] = self.payer_account_name if self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict()", "in d: o.payer_ip_role_id = d['payer_ip_role_id'] if 'receipt_no' in d: o.receipt_no = d['receipt_no'] if", "return self._bsn_no @bsn_no.setter def bsn_no(self, value): self._bsn_no = value @property def bsn_ref_no(self): return", "json from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi", "fund_log_id(self): return self._fund_log_id @fund_log_id.setter def fund_log_id(self, value): self._fund_log_id = value @property def gl_exchange_rate(self):", 
"d['ref_trans_no_type'] if 'source' in d: o.source = d['source'] if 'status' in d: o.status", "self._payee_ip_role_id = value @property def payer_account_name(self): return self._payer_account_name @payer_account_name.setter def payer_account_name(self, value): self._payer_account_name", "else: params['creator'] = self.creator if self.freeze_amt: if hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else:", "if self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict() else: params['collected_amt'] = self.collected_amt if", "used_amt(self): return self._used_amt @used_amt.setter def used_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt = value", "gmt_modified(self, value): self._gmt_modified = value @property def payee_account_name(self): return self._payee_account_name @payee_account_name.setter def payee_account_name(self,", "o.payer_account_name = d['payer_account_name'] if 'payer_account_no' in d: o.payer_account_no = d['payer_account_no'] if 'payer_bank_branch_name' in", "return self._collect_date @collect_date.setter def collect_date(self, value): self._collect_date = value @property def collect_status(self): return", "self._source = None self._status = None self._tnt_inst_id = None self._used_amt = None self._writeoff_relative_id", "@property def bsn_no(self): return self._bsn_no @bsn_no.setter def bsn_no(self, value): self._bsn_no = value @property", "None self._writeoff_relative_id = None @property def bsn_no(self): return self._bsn_no @bsn_no.setter def bsn_no(self, value):", "= value @property def collect_amt(self): return self._collect_amt @collect_amt.setter def collect_amt(self, value): if isinstance(value,", "self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self, value): self._tnt_inst_id = value @property def used_amt(self): return self._used_amt", "value): self._payee_account_name = value 
@property def payee_account_no(self): return self._payee_account_no @payee_account_no.setter def payee_account_no(self, value):", "self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] = self.payee_account_no if self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict()", "d['gl_exchange_rate'] if 'gmt_create' in d: o.gmt_create = d['gmt_create'] if 'gmt_modified' in d: o.gmt_modified", "params = dict() if self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict() else: params['bsn_no']", "'receipt_no' in d: o.receipt_no = d['receipt_no'] if 'ref_trans_no' in d: o.ref_trans_no = d['ref_trans_no']", "else: params['bsn_no'] = self.bsn_no if self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else:", "params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] = self.writeoff_relative_id return params @staticmethod def from_alipay_dict(d): if", "collect_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt = value else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property", "= value @property def used_amt(self): return self._used_amt @used_amt.setter def used_amt(self, value): if isinstance(value,", "= d['channel_memo'] if 'collect_amt' in d: o.collect_amt = d['collect_amt'] if 'collect_date' in d:", "o.business_scene = d['business_scene'] if 'channel' in d: o.channel = d['channel'] if 'channel_log_no' in", "= self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] = self.ref_trans_no_type if self.source: if hasattr(self.source, 'to_alipay_dict'): params['source'] =", "if hasattr(self.collect_date, 'to_alipay_dict'): params['collect_date'] = self.collect_date.to_alipay_dict() else: params['collect_date'] = self.collect_date if 
self.collect_status: if", "self.payer_account_name if self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict() else: params['payer_account_no'] = self.payer_account_no", "coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi", "= value @property def business_scene(self): return self._business_scene @business_scene.setter def business_scene(self, value): self._business_scene =", "value): self._channel_log_no = value @property def channel_memo(self): return self._channel_memo @channel_memo.setter def channel_memo(self, value):", "= value @property def status(self): return self._status @status.setter def status(self, value): self._status =", "'used_amt' in d: o.used_amt = d['used_amt'] if 'writeoff_relative_id' in d: o.writeoff_relative_id = d['writeoff_relative_id']", "d['payee_account_no'] if 'payee_inst_id' in d: o.payee_inst_id = d['payee_inst_id'] if 'payee_ip_role_id' in d: o.payee_ip_role_id", "= None self._channel_log_no = None self._channel_memo = None self._collect_amt = None self._collect_date =", "payee_inst_id(self, value): self._payee_inst_id = value @property def payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter def payee_ip_role_id(self,", "in d: o.collect_date = d['collect_date'] if 'collect_status' in d: o.collect_status = d['collect_status'] if", "value): self._bsn_ref_no = value @property def business_scene(self): return self._business_scene @business_scene.setter def business_scene(self, value):", "def __init__(self): self._bsn_no = None self._bsn_ref_no = None self._business_scene = None self._channel =", "@property def bsn_ref_no(self): return self._bsn_ref_no @bsn_ref_no.setter def bsn_ref_no(self, value): self._bsn_ref_no = value @property", "bsn_ref_no(self): return self._bsn_ref_no @bsn_ref_no.setter def 
bsn_ref_no(self, value): self._bsn_ref_no = value @property def business_scene(self):", "hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified if self.payee_account_name: if hasattr(self.payee_account_name,", "def channel_log_no(self, value): self._channel_log_no = value @property def channel_memo(self): return self._channel_memo @channel_memo.setter def", "in d: o.payer_bank_branch_name = d['payer_bank_branch_name'] if 'payer_inst_id' in d: o.payer_inst_id = d['payer_inst_id'] if", "@fund_log_id.setter def fund_log_id(self, value): self._fund_log_id = value @property def gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter", "= None self._payee_account_no = None self._payee_inst_id = None self._payee_ip_role_id = None self._payer_account_name =", "self._collect_status = value @property def collected_amt(self): return self._collected_amt @collected_amt.setter def collected_amt(self, value): if", "None self._collected_amt = None self._creator = None self._freeze_amt = None self._fund_log_id = None", "if 'creator' in d: o.creator = d['creator'] if 'freeze_amt' in d: o.freeze_amt =", "if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] = self.payer_bank_branch_name if self.payer_inst_id: if", "payer_account_name(self, value): self._payer_account_name = value @property def payer_account_no(self): return self._payer_account_no @payer_account_no.setter def payer_account_no(self,", "bsn_no(self, value): self._bsn_no = value @property def bsn_ref_no(self): return self._bsn_ref_no @bsn_ref_no.setter def bsn_ref_no(self,", "return self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self, value): self._gl_exchange_rate = value @property def gmt_create(self): return", "self.creator if self.freeze_amt: if 
hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] = self.freeze_amt", "= value @property def payee_account_no(self): return self._payee_account_no @payee_account_no.setter def payee_account_no(self, value): self._payee_account_no =", "= None self._used_amt = None self._writeoff_relative_id = None @property def bsn_no(self): return self._bsn_no", "if 'receipt_no' in d: o.receipt_no = d['receipt_no'] if 'ref_trans_no' in d: o.ref_trans_no =", "self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict() else: params['bsn_no'] = self.bsn_no if self.bsn_ref_no:", "'to_alipay_dict'): params['collect_status'] = self.collect_status.to_alipay_dict() else: params['collect_status'] = self.collect_status if self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'):", "return self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self, value): self._payer_ip_role_id = value @property def receipt_no(self): return", "def writeoff_relative_id(self, value): self._writeoff_relative_id = value def to_alipay_dict(self): params = dict() if self.bsn_no:", "CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no = None self._bsn_ref_no = None self._business_scene = None self._channel", "= self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] =", "@property def payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self, value): self._payer_bank_branch_name = value @property", "gmt_modified(self): return self._gmt_modified @gmt_modified.setter def gmt_modified(self, value): self._gmt_modified = value @property def payee_account_name(self):", "self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] = 
self.channel_log_no.to_alipay_dict() else: params['channel_log_no'] = self.channel_log_no if self.channel_memo:", "params['status'] = self.status.to_alipay_dict() else: params['status'] = self.status if self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id']", "self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] = self.ref_trans_no_type if self.source:", "def from_alipay_dict(d): if not d: return None o = CollectReceiptOpenApiDTO() if 'bsn_no' in", "self._bsn_no = None self._bsn_ref_no = None self._business_scene = None self._channel = None self._channel_log_no", "self.status if self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] = self.tnt_inst_id", "return self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self, value): self._payer_inst_id = value @property def payer_ip_role_id(self): return", "self.fund_log_id if self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] = self.gl_exchange_rate", "o.bsn_no = d['bsn_no'] if 'bsn_ref_no' in d: o.bsn_ref_no = d['bsn_ref_no'] if 'business_scene' in", "'payer_account_name' in d: o.payer_account_name = d['payer_account_name'] if 'payer_account_no' in d: o.payer_account_no = d['payer_account_no']", "= self.status.to_alipay_dict() else: params['status'] = self.status if self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] =", "return self._source @source.setter def source(self, value): self._source = value @property def status(self): return", "hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] = self.tnt_inst_id if 
self.used_amt: if hasattr(self.used_amt,", "value): self._channel_memo = value @property def collect_amt(self): return self._collect_amt @collect_amt.setter def collect_amt(self, value):", "self.bsn_ref_no: if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] = self.bsn_ref_no if self.business_scene:", "d: o.collect_amt = d['collect_amt'] if 'collect_date' in d: o.collect_date = d['collect_date'] if 'collect_status'", "writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self, value): self._writeoff_relative_id = value def to_alipay_dict(self): params", "@bsn_ref_no.setter def bsn_ref_no(self, value): self._bsn_ref_no = value @property def business_scene(self): return self._business_scene @business_scene.setter", "@property def channel_log_no(self): return self._channel_log_no @channel_log_no.setter def channel_log_no(self, value): self._channel_log_no = value @property", "alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi", "= value @property def collected_amt(self): return self._collected_amt @collected_amt.setter def collected_amt(self, value): if isinstance(value,", "payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self, value): self._payer_bank_branch_name = value @property def payer_inst_id(self):", "if hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] = self.bsn_ref_no if self.business_scene: if", "value @property def tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self, value): self._tnt_inst_id = value", "= value 
@property def gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter def gl_exchange_rate(self, value): self._gl_exchange_rate =", "value): self._creator = value @property def freeze_amt(self): return self._freeze_amt @freeze_amt.setter def freeze_amt(self, value):", "= None self._source = None self._status = None self._tnt_inst_id = None self._used_amt =", "d['receipt_no'] if 'ref_trans_no' in d: o.ref_trans_no = d['ref_trans_no'] if 'ref_trans_no_type' in d: o.ref_trans_no_type", "def channel(self, value): self._channel = value @property def channel_log_no(self): return self._channel_log_no @channel_log_no.setter def", "= None self._freeze_amt = None self._fund_log_id = None self._gl_exchange_rate = None self._gmt_create =", "= self.payee_account_no if self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] =", "params['payer_inst_id'] = self.payer_inst_id if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id']", "params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict() else: params['payer_ip_role_id'] = self.payer_ip_role_id if self.receipt_no: if hasattr(self.receipt_no, 'to_alipay_dict'): params['receipt_no']", "= self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] = self.tnt_inst_id if self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] =", "d['channel_log_no'] if 'channel_memo' in d: o.channel_memo = d['channel_memo'] if 'collect_amt' in d: o.collect_amt", "= self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id'] = self.payee_inst_id if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] =", "if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): 
params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] = self.payer_bank_branch_name if", "o.ref_trans_no = d['ref_trans_no'] if 'ref_trans_no_type' in d: o.ref_trans_no_type = d['ref_trans_no_type'] if 'source' in", "= None self._collected_amt = None self._creator = None self._freeze_amt = None self._fund_log_id =", "self._collect_status = None self._collected_amt = None self._creator = None self._freeze_amt = None self._fund_log_id", "self.gl_exchange_rate if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict() else: params['gmt_create'] = self.gmt_create", "@collect_status.setter def collect_status(self, value): self._collect_status = value @property def collected_amt(self): return self._collected_amt @collected_amt.setter", "payee_ip_role_id(self, value): self._payee_ip_role_id = value @property def payer_account_name(self): return self._payer_account_name @payer_account_name.setter def payer_account_name(self,", "value @property def receipt_no(self): return self._receipt_no @receipt_no.setter def receipt_no(self, value): self._receipt_no = value", "'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict() else: params['channel_memo'] = self.channel_memo if self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'):", "utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from", "in d: o.receipt_no = d['receipt_no'] if 'ref_trans_no' in d: o.ref_trans_no = d['ref_trans_no'] if", "= d['bsn_ref_no'] if 'business_scene' in d: o.business_scene = d['business_scene'] if 'channel' in d:", "def tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self, value): self._tnt_inst_id = value @property def", "'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() 
else: params['ref_trans_no'] = self.ref_trans_no if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'):", "params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] = self.fund_log_id if self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 'to_alipay_dict'): params['gl_exchange_rate']", "value): self._status = value @property def tnt_inst_id(self): return self._tnt_inst_id @tnt_inst_id.setter def tnt_inst_id(self, value):", "o.channel_memo = d['channel_memo'] if 'collect_amt' in d: o.collect_amt = d['collect_amt'] if 'collect_date' in", "params['payer_account_name'] = self.payer_account_name if self.payer_account_no: if hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict() else: params['payer_account_no']", "'collect_date' in d: o.collect_date = d['collect_date'] if 'collect_status' in d: o.collect_status = d['collect_status']", "return params @staticmethod def from_alipay_dict(d): if not d: return None o = CollectReceiptOpenApiDTO()", "in d: o.source = d['source'] if 'status' in d: o.status = d['status'] if", "self._payer_ip_role_id @payer_ip_role_id.setter def payer_ip_role_id(self, value): self._payer_ip_role_id = value @property def receipt_no(self): return self._receipt_no", "hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict() else: params['collected_amt'] = self.collected_amt if self.creator: if hasattr(self.creator,", "= value def to_alipay_dict(self): params = dict() if self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no']", "@property def gmt_create(self): return self._gmt_create @gmt_create.setter def gmt_create(self, value): self._gmt_create = value @property", "if self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict() else: params['bsn_no'] = self.bsn_no if", "value else: self._freeze_amt = 
MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self): return self._fund_log_id @fund_log_id.setter def fund_log_id(self,", "hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id'] = self.writeoff_relative_id.to_alipay_dict() else: params['writeoff_relative_id'] = self.writeoff_relative_id return params @staticmethod def", "= d['payer_inst_id'] if 'payer_ip_role_id' in d: o.payer_ip_role_id = d['payer_ip_role_id'] if 'receipt_no' in d:", "= self.receipt_no if self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no'] =", "channel(self, value): self._channel = value @property def channel_log_no(self): return self._channel_log_no @channel_log_no.setter def channel_log_no(self,", "in d: o.status = d['status'] if 'tnt_inst_id' in d: o.tnt_inst_id = d['tnt_inst_id'] if", "self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] = self.payer_bank_branch_name if self.payer_inst_id:", "__init__(self): self._bsn_no = None self._bsn_ref_no = None self._business_scene = None self._channel = None", "self._ref_trans_no @ref_trans_no.setter def ref_trans_no(self, value): self._ref_trans_no = value @property def ref_trans_no_type(self): return self._ref_trans_no_type", "business_scene(self, value): self._business_scene = value @property def channel(self): return self._channel @channel.setter def channel(self,", "@property def payer_inst_id(self): return self._payer_inst_id @payer_inst_id.setter def payer_inst_id(self, value): self._payer_inst_id = value @property", "params['channel'] = self.channel.to_alipay_dict() else: params['channel'] = self.channel if self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no']", "d: o.creator = 
d['creator'] if 'freeze_amt' in d: o.freeze_amt = d['freeze_amt'] if 'fund_log_id'", "python # -*- coding: utf-8 -*- import json from alipay.aop.api.constant.ParamConstants import * from", "def channel(self): return self._channel @channel.setter def channel(self, value): self._channel = value @property def", "hasattr(self.bsn_ref_no, 'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] = self.bsn_ref_no if self.business_scene: if hasattr(self.business_scene,", "def collect_date(self, value): self._collect_date = value @property def collect_status(self): return self._collect_status @collect_status.setter def", "value @property def payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self, value): self._payer_bank_branch_name = value", "= self.ref_trans_no if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] =", "d: o.collected_amt = d['collected_amt'] if 'creator' in d: o.creator = d['creator'] if 'freeze_amt'", "return self._fund_log_id @fund_log_id.setter def fund_log_id(self, value): self._fund_log_id = value @property def gl_exchange_rate(self): return", "= d['collect_amt'] if 'collect_date' in d: o.collect_date = d['collect_date'] if 'collect_status' in d:", "d: o.payer_bank_branch_name = d['payer_bank_branch_name'] if 'payer_inst_id' in d: o.payer_inst_id = d['payer_inst_id'] if 'payer_ip_role_id'", "if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] = self.business_scene.to_alipay_dict() else: params['business_scene'] = self.business_scene if self.channel: if", "'to_alipay_dict'): params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else: params['ref_trans_no_type'] = self.ref_trans_no_type if self.source: if hasattr(self.source, 'to_alipay_dict'):", "None 
self._gmt_modified = None self._payee_account_name = None self._payee_account_no = None self._payee_inst_id = None", "def fund_log_id(self, value): self._fund_log_id = value @property def gl_exchange_rate(self): return self._gl_exchange_rate @gl_exchange_rate.setter def", "o.payee_inst_id = d['payee_inst_id'] if 'payee_ip_role_id' in d: o.payee_ip_role_id = d['payee_ip_role_id'] if 'payer_account_name' in", "creator(self, value): self._creator = value @property def freeze_amt(self): return self._freeze_amt @freeze_amt.setter def freeze_amt(self,", "from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no", "gmt_create(self, value): self._gmt_create = value @property def gmt_modified(self): return self._gmt_modified @gmt_modified.setter def gmt_modified(self,", "if hasattr(self.creator, 'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict() else: params['creator'] = self.creator if self.freeze_amt: if", "= self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] = self.payer_inst_id if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id'] =", "@used_amt.setter def used_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._used_amt = value else: self._used_amt =", "alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object):", "value @property def payee_account_no(self): return self._payee_account_no @payee_account_no.setter def payee_account_no(self, value): self._payee_account_no = value", "if self.gl_exchange_rate: if hasattr(self.gl_exchange_rate, 
'to_alipay_dict'): params['gl_exchange_rate'] = self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] = self.gl_exchange_rate if", "self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id'] = self.payee_ip_role_id if self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] = self.payer_account_name.to_alipay_dict()", "None self._payer_account_no = None self._payer_bank_branch_name = None self._payer_inst_id = None self._payer_ip_role_id = None", "self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self): return self._fund_log_id @fund_log_id.setter def fund_log_id(self, value): self._fund_log_id", "params['payee_ip_role_id'] = self.payee_ip_role_id if self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] = self.payer_account_name.to_alipay_dict() else: params['payer_account_name']", "from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from", "@payer_inst_id.setter def payer_inst_id(self, value): self._payer_inst_id = value @property def payer_ip_role_id(self): return self._payer_ip_role_id @payer_ip_role_id.setter", "value): self._writeoff_relative_id = value def to_alipay_dict(self): params = dict() if self.bsn_no: if hasattr(self.bsn_no,", "value else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self): return self._collect_date @collect_date.setter def collect_date(self,", "@bsn_no.setter def bsn_no(self, value): self._bsn_no = value @property def bsn_ref_no(self): return self._bsn_ref_no @bsn_ref_no.setter", "None self._channel_log_no = None self._channel_memo = None self._collect_amt = None self._collect_date = None", 
"self._payee_inst_id = None self._payee_ip_role_id = None self._payer_account_name = None self._payer_account_no = None self._payer_bank_branch_name", "creator(self): return self._creator @creator.setter def creator(self, value): self._creator = value @property def freeze_amt(self):", "value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._freeze_amt = value else: self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def", "@property def fund_log_id(self): return self._fund_log_id @fund_log_id.setter def fund_log_id(self, value): self._fund_log_id = value @property", "return self._gmt_create @gmt_create.setter def gmt_create(self, value): self._gmt_create = value @property def gmt_modified(self): return", "= value @property def gmt_modified(self): return self._gmt_modified @gmt_modified.setter def gmt_modified(self, value): self._gmt_modified =", "def payee_account_no(self): return self._payee_account_no @payee_account_no.setter def payee_account_no(self, value): self._payee_account_no = value @property def", "else: params['channel_log_no'] = self.channel_log_no if self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict() else:", "self.creator.to_alipay_dict() else: params['creator'] = self.creator if self.freeze_amt: if hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict()", "else: self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self): return self._fund_log_id @fund_log_id.setter def fund_log_id(self, value):", "if self.freeze_amt: if hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] = self.freeze_amt if", "params['gmt_create'] = self.gmt_create if self.gmt_modified: if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else: 
params['gmt_modified']", "'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else: params['channel'] = self.channel if self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'):", "if 'payer_account_no' in d: o.payer_account_no = d['payer_account_no'] if 'payer_bank_branch_name' in d: o.payer_bank_branch_name =", "gl_exchange_rate(self, value): self._gl_exchange_rate = value @property def gmt_create(self): return self._gmt_create @gmt_create.setter def gmt_create(self,", "self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self, value): self._ref_trans_no_type = value @property def source(self): return self._source", "@payer_ip_role_id.setter def payer_ip_role_id(self, value): self._payer_ip_role_id = value @property def receipt_no(self): return self._receipt_no @receipt_no.setter", "params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified if self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name']", "if 'payee_account_name' in d: o.payee_account_name = d['payee_account_name'] if 'payee_account_no' in d: o.payee_account_no =", "o.collected_amt = d['collected_amt'] if 'creator' in d: o.creator = d['creator'] if 'freeze_amt' in", "self._fund_log_id @fund_log_id.setter def fund_log_id(self, value): self._fund_log_id = value @property def gl_exchange_rate(self): return self._gl_exchange_rate", "self._status = None self._tnt_inst_id = None self._used_amt = None self._writeoff_relative_id = None @property", "self._source @source.setter def source(self, value): self._source = value @property def status(self): return self._status", "if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] = self.used_amt.to_alipay_dict() else: params['used_amt'] = self.used_amt if self.writeoff_relative_id: if", "MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def creator(self): return self._creator @creator.setter def 
creator(self, value): self._creator = value", "params['freeze_amt'] = self.freeze_amt if self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else: params['fund_log_id']", "else: params['receipt_no'] = self.receipt_no if self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict() else:", "params['receipt_no'] = self.receipt_no.to_alipay_dict() else: params['receipt_no'] = self.receipt_no if self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no']", "'payee_account_name' in d: o.payee_account_name = d['payee_account_name'] if 'payee_account_no' in d: o.payee_account_no = d['payee_account_no']", "d['ref_trans_no'] if 'ref_trans_no_type' in d: o.ref_trans_no_type = d['ref_trans_no_type'] if 'source' in d: o.source", "if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] = self.collect_status.to_alipay_dict() else: params['collect_status'] = self.collect_status if self.collected_amt: if", "in d: o.gmt_modified = d['gmt_modified'] if 'payee_account_name' in d: o.payee_account_name = d['payee_account_name'] if", "= self.payee_ip_role_id.to_alipay_dict() else: params['payee_ip_role_id'] = self.payee_ip_role_id if self.payer_account_name: if hasattr(self.payer_account_name, 'to_alipay_dict'): params['payer_account_name'] =", "d: o.tnt_inst_id = d['tnt_inst_id'] if 'used_amt' in d: o.used_amt = d['used_amt'] if 'writeoff_relative_id'", "self.source.to_alipay_dict() else: params['source'] = self.source if self.status: if hasattr(self.status, 'to_alipay_dict'): params['status'] = self.status.to_alipay_dict()", "else: params['payer_account_no'] = self.payer_account_no if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name, 'to_alipay_dict'): params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else:", "@property def gmt_modified(self): return 
self._gmt_modified @gmt_modified.setter def gmt_modified(self, value): self._gmt_modified = value @property", "self.freeze_amt: if hasattr(self.freeze_amt, 'to_alipay_dict'): params['freeze_amt'] = self.freeze_amt.to_alipay_dict() else: params['freeze_amt'] = self.freeze_amt if self.fund_log_id:", "if hasattr(self.status, 'to_alipay_dict'): params['status'] = self.status.to_alipay_dict() else: params['status'] = self.status if self.tnt_inst_id: if", "= value @property def source(self): return self._source @source.setter def source(self, value): self._source =", "else: params['ref_trans_no'] = self.ref_trans_no if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict() else:", "def payer_bank_branch_name(self): return self._payer_bank_branch_name @payer_bank_branch_name.setter def payer_bank_branch_name(self, value): self._payer_bank_branch_name = value @property def", "'to_alipay_dict'): params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] = self.bsn_ref_no if self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'):", "if self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] = self.used_amt.to_alipay_dict() else: params['used_amt'] = self.used_amt if", "= d['ref_trans_no'] if 'ref_trans_no_type' in d: o.ref_trans_no_type = d['ref_trans_no_type'] if 'source' in d:", "'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] = self.payee_account_no if self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'):", "= dict() if self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict() else: params['bsn_no'] =", "@property def status(self): return self._status @status.setter def status(self, value): self._status = value @property", "d: o.ref_trans_no = d['ref_trans_no'] if 'ref_trans_no_type' in 
d: o.ref_trans_no_type = d['ref_trans_no_type'] if 'source'", "hasattr(self.payer_account_no, 'to_alipay_dict'): params['payer_account_no'] = self.payer_account_no.to_alipay_dict() else: params['payer_account_no'] = self.payer_account_no if self.payer_bank_branch_name: if hasattr(self.payer_bank_branch_name,", "dict() if self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict() else: params['bsn_no'] = self.bsn_no", "params['collect_date'] = self.collect_date.to_alipay_dict() else: params['collect_date'] = self.collect_date if self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status']", "else: params['freeze_amt'] = self.freeze_amt if self.fund_log_id: if hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else:", "@channel.setter def channel(self, value): self._channel = value @property def channel_log_no(self): return self._channel_log_no @channel_log_no.setter", "receipt_no(self, value): self._receipt_no = value @property def ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter def ref_trans_no(self,", "hasattr(self.fund_log_id, 'to_alipay_dict'): params['fund_log_id'] = self.fund_log_id.to_alipay_dict() else: params['fund_log_id'] = self.fund_log_id if self.gl_exchange_rate: if hasattr(self.gl_exchange_rate,", "params['payee_account_no'] = self.payee_account_no if self.payee_inst_id: if hasattr(self.payee_inst_id, 'to_alipay_dict'): params['payee_inst_id'] = self.payee_inst_id.to_alipay_dict() else: params['payee_inst_id']", "o.collect_amt = d['collect_amt'] if 'collect_date' in d: o.collect_date = d['collect_date'] if 'collect_status' in", "value): self._gmt_create = value @property def gmt_modified(self): return self._gmt_modified @gmt_modified.setter def gmt_modified(self, value):", "d: o.business_scene = d['business_scene'] if 'channel' in d: o.channel = d['channel'] if 'channel_log_no'", "value @property def 
gmt_modified(self): return self._gmt_modified @gmt_modified.setter def gmt_modified(self, value): self._gmt_modified = value", "@source.setter def source(self, value): self._source = value @property def status(self): return self._status @status.setter", "= d['receipt_no'] if 'ref_trans_no' in d: o.ref_trans_no = d['ref_trans_no'] if 'ref_trans_no_type' in d:", "isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt = value else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self): return", "value): self._payee_account_no = value @property def payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self, value):", "'to_alipay_dict'): params['status'] = self.status.to_alipay_dict() else: params['status'] = self.status if self.tnt_inst_id: if hasattr(self.tnt_inst_id, 'to_alipay_dict'):", "= d['payee_account_no'] if 'payee_inst_id' in d: o.payee_inst_id = d['payee_inst_id'] if 'payee_ip_role_id' in d:", "d: o.receipt_no = d['receipt_no'] if 'ref_trans_no' in d: o.ref_trans_no = d['ref_trans_no'] if 'ref_trans_no_type'", "status(self): return self._status @status.setter def status(self, value): self._status = value @property def tnt_inst_id(self):", "self._channel_log_no = None self._channel_memo = None self._collect_amt = None self._collect_date = None self._collect_status", "def collect_status(self, value): self._collect_status = value @property def collected_amt(self): return self._collected_amt @collected_amt.setter def", "MultiCurrencyMoneyOpenApi): self._used_amt = value else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self): return self._writeoff_relative_id", "in d: o.creator = d['creator'] if 'freeze_amt' in d: o.freeze_amt = d['freeze_amt'] if", "o.status = d['status'] if 'tnt_inst_id' in d: o.tnt_inst_id = d['tnt_inst_id'] if 'used_amt' in", "'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() 
else: params['source'] = self.source if self.status: if hasattr(self.status, 'to_alipay_dict'):", "if 'payee_ip_role_id' in d: o.payee_ip_role_id = d['payee_ip_role_id'] if 'payer_account_name' in d: o.payer_account_name =", "tnt_inst_id(self, value): self._tnt_inst_id = value @property def used_amt(self): return self._used_amt @used_amt.setter def used_amt(self,", "return self._creator @creator.setter def creator(self, value): self._creator = value @property def freeze_amt(self): return", "None self._payee_ip_role_id = None self._payer_account_name = None self._payer_account_no = None self._payer_bank_branch_name = None", "None self._fund_log_id = None self._gl_exchange_rate = None self._gmt_create = None self._gmt_modified = None", "self.payee_account_name if self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] = self.payee_account_no.to_alipay_dict() else: params['payee_account_no'] = self.payee_account_no", "@receipt_no.setter def receipt_no(self, value): self._receipt_no = value @property def ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter", "= self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] = self.payee_account_name if self.payee_account_no: if hasattr(self.payee_account_no, 'to_alipay_dict'): params['payee_account_no'] =", "None self._payer_ip_role_id = None self._receipt_no = None self._ref_trans_no = None self._ref_trans_no_type = None", "self._bsn_no = value @property def bsn_ref_no(self): return self._bsn_ref_no @bsn_ref_no.setter def bsn_ref_no(self, value): self._bsn_ref_no", "payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self, value): self._payee_inst_id = value @property def payee_ip_role_id(self):", "self._ref_trans_no = value @property def ref_trans_no_type(self): return self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self, value): self._ref_trans_no_type", "'to_alipay_dict'): 
params['business_scene'] = self.business_scene.to_alipay_dict() else: params['business_scene'] = self.business_scene if self.channel: if hasattr(self.channel, 'to_alipay_dict'):", "self.channel_memo if self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] = self.collect_amt.to_alipay_dict() else: params['collect_amt'] = self.collect_amt", "def payee_account_name(self, value): self._payee_account_name = value @property def payee_account_no(self): return self._payee_account_no @payee_account_no.setter def", "value @property def payee_account_name(self): return self._payee_account_name @payee_account_name.setter def payee_account_name(self, value): self._payee_account_name = value", "@payer_bank_branch_name.setter def payer_bank_branch_name(self, value): self._payer_bank_branch_name = value @property def payer_inst_id(self): return self._payer_inst_id @payer_inst_id.setter", "value @property def ref_trans_no_type(self): return self._ref_trans_no_type @ref_trans_no_type.setter def ref_trans_no_type(self, value): self._ref_trans_no_type = value", "@collected_amt.setter def collected_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt = value else: self._collected_amt =", "self._freeze_amt = value else: self._freeze_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self): return self._fund_log_id @fund_log_id.setter", "to_alipay_dict(self): params = dict() if self.bsn_no: if hasattr(self.bsn_no, 'to_alipay_dict'): params['bsn_no'] = self.bsn_no.to_alipay_dict() else:", "= self.channel.to_alipay_dict() else: params['channel'] = self.channel if self.channel_log_no: if hasattr(self.channel_log_no, 'to_alipay_dict'): params['channel_log_no'] =", "if self.collect_status: if hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] = self.collect_status.to_alipay_dict() else: params['collect_status'] = self.collect_status if", "'creator' in d: o.creator = 
d['creator'] if 'freeze_amt' in d: o.freeze_amt = d['freeze_amt']", "'payer_bank_branch_name' in d: o.payer_bank_branch_name = d['payer_bank_branch_name'] if 'payer_inst_id' in d: o.payer_inst_id = d['payer_inst_id']", "else: params['payer_bank_branch_name'] = self.payer_bank_branch_name if self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else:", "= d['source'] if 'status' in d: o.status = d['status'] if 'tnt_inst_id' in d:", "params['payer_bank_branch_name'] = self.payer_bank_branch_name.to_alipay_dict() else: params['payer_bank_branch_name'] = self.payer_bank_branch_name if self.payer_inst_id: if hasattr(self.payer_inst_id, 'to_alipay_dict'): params['payer_inst_id']", "None @property def bsn_no(self): return self._bsn_no @bsn_no.setter def bsn_no(self, value): self._bsn_no = value", "collect_status(self, value): self._collect_status = value @property def collected_amt(self): return self._collected_amt @collected_amt.setter def collected_amt(self,", "if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] = self.payee_account_name if self.payee_account_no: if", "self._source = value @property def status(self): return self._status @status.setter def status(self, value): self._status", "= self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no'] = self.ref_trans_no if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type'] =", "if hasattr(self.gmt_modified, 'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified if self.payee_account_name: if", "value): self._gl_exchange_rate = value @property def gmt_create(self): return self._gmt_create @gmt_create.setter def gmt_create(self, value):", "channel_log_no(self, value): self._channel_log_no = value @property 
def channel_memo(self): return self._channel_memo @channel_memo.setter def channel_memo(self,", "'to_alipay_dict'): params['gmt_modified'] = self.gmt_modified.to_alipay_dict() else: params['gmt_modified'] = self.gmt_modified if self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'):", "params['used_amt'] = self.used_amt.to_alipay_dict() else: params['used_amt'] = self.used_amt if self.writeoff_relative_id: if hasattr(self.writeoff_relative_id, 'to_alipay_dict'): params['writeoff_relative_id']", "@property def ref_trans_no(self): return self._ref_trans_no @ref_trans_no.setter def ref_trans_no(self, value): self._ref_trans_no = value @property", "MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def fund_log_id(self): return self._fund_log_id @fund_log_id.setter def fund_log_id(self, value): self._fund_log_id = value", "else: self._used_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def writeoff_relative_id(self): return self._writeoff_relative_id @writeoff_relative_id.setter def writeoff_relative_id(self, value):", "else: params['business_scene'] = self.business_scene if self.channel: if hasattr(self.channel, 'to_alipay_dict'): params['channel'] = self.channel.to_alipay_dict() else:", "None self._collect_amt = None self._collect_date = None self._collect_status = None self._collected_amt = None", "= None self._payer_account_no = None self._payer_bank_branch_name = None self._payer_inst_id = None self._payer_ip_role_id =", "@payee_inst_id.setter def payee_inst_id(self, value): self._payee_inst_id = value @property def payee_ip_role_id(self): return self._payee_ip_role_id @payee_ip_role_id.setter", "else: params['collect_status'] = self.collect_status if self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict() else:", "def payee_account_name(self): return self._payee_account_name @payee_account_name.setter def payee_account_name(self, value): 
self._payee_account_name = value @property def", "d: o.bsn_ref_no = d['bsn_ref_no'] if 'business_scene' in d: o.business_scene = d['business_scene'] if 'channel'", "if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt = value else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def collect_date(self):", "value @property def source(self): return self._source @source.setter def source(self, value): self._source = value", "value @property def collected_amt(self): return self._collected_amt @collected_amt.setter def collected_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi):", "self.receipt_no.to_alipay_dict() else: params['receipt_no'] = self.receipt_no if self.ref_trans_no: if hasattr(self.ref_trans_no, 'to_alipay_dict'): params['ref_trans_no'] = self.ref_trans_no.to_alipay_dict()", "@channel_log_no.setter def channel_log_no(self, value): self._channel_log_no = value @property def channel_memo(self): return self._channel_memo @channel_memo.setter", "def source(self): return self._source @source.setter def source(self, value): self._source = value @property def", "self._payee_account_no @payee_account_no.setter def payee_account_no(self, value): self._payee_account_no = value @property def payee_inst_id(self): return self._payee_inst_id", "if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict() else: params['channel_memo'] = self.channel_memo if self.collect_amt: if", "import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no = None self._bsn_ref_no = None self._business_scene", "params['bsn_ref_no'] = self.bsn_ref_no if self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene'] = self.business_scene.to_alipay_dict() else: params['business_scene']", "if self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] = 
self.collect_amt.to_alipay_dict() else: params['collect_amt'] = self.collect_amt if", "'to_alipay_dict'): params['payer_inst_id'] = self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] = self.payer_inst_id if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'):", "if 'gmt_create' in d: o.gmt_create = d['gmt_create'] if 'gmt_modified' in d: o.gmt_modified =", "import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import", "if 'collect_status' in d: o.collect_status = d['collect_status'] if 'collected_amt' in d: o.collected_amt =", "else: params['tnt_inst_id'] = self.tnt_inst_id if self.used_amt: if hasattr(self.used_amt, 'to_alipay_dict'): params['used_amt'] = self.used_amt.to_alipay_dict() else:", "@property def collect_amt(self): return self._collect_amt @collect_amt.setter def collect_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt", "None self._payee_account_name = None self._payee_account_no = None self._payee_inst_id = None self._payee_ip_role_id = None", "in d: o.gmt_create = d['gmt_create'] if 'gmt_modified' in d: o.gmt_modified = d['gmt_modified'] if", "d['collect_date'] if 'collect_status' in d: o.collect_status = d['collect_status'] if 'collected_amt' in d: o.collected_amt", "self.collect_status.to_alipay_dict() else: params['collect_status'] = self.collect_status if self.collected_amt: if hasattr(self.collected_amt, 'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict()", "self._payer_account_name = None self._payer_account_no = None self._payer_bank_branch_name = None self._payer_inst_id = None self._payer_ip_role_id", "= None self._payee_inst_id = None self._payee_ip_role_id = None self._payer_account_name = None self._payer_account_no =", 
"self.ref_trans_no.to_alipay_dict() else: params['ref_trans_no'] = self.ref_trans_no if self.ref_trans_no_type: if hasattr(self.ref_trans_no_type, 'to_alipay_dict'): params['ref_trans_no_type'] = self.ref_trans_no_type.to_alipay_dict()", "def creator(self, value): self._creator = value @property def freeze_amt(self): return self._freeze_amt @freeze_amt.setter def", "if self.channel_memo: if hasattr(self.channel_memo, 'to_alipay_dict'): params['channel_memo'] = self.channel_memo.to_alipay_dict() else: params['channel_memo'] = self.channel_memo if", "params['channel_memo'] = self.channel_memo if self.collect_amt: if hasattr(self.collect_amt, 'to_alipay_dict'): params['collect_amt'] = self.collect_amt.to_alipay_dict() else: params['collect_amt']", "self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else: params['payee_account_name'] = self.payee_account_name if self.payee_account_no:", "self._bsn_ref_no = None self._business_scene = None self._channel = None self._channel_log_no = None self._channel_memo", "if hasattr(self.tnt_inst_id, 'to_alipay_dict'): params['tnt_inst_id'] = self.tnt_inst_id.to_alipay_dict() else: params['tnt_inst_id'] = self.tnt_inst_id if self.used_amt: if", "params['collected_amt'] = self.collected_amt if self.creator: if hasattr(self.creator, 'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict() else: params['creator']", "@property def channel_memo(self): return self._channel_memo @channel_memo.setter def channel_memo(self, value): self._channel_memo = value @property", "= d['payee_inst_id'] if 'payee_ip_role_id' in d: o.payee_ip_role_id = d['payee_ip_role_id'] if 'payer_account_name' in d:", "d['source'] if 'status' in d: o.status = d['status'] if 'tnt_inst_id' in d: o.tnt_inst_id", "d: o.payer_account_no = d['payer_account_no'] if 'payer_bank_branch_name' in d: o.payer_bank_branch_name = d['payer_bank_branch_name'] if 
'payer_inst_id'", "def fund_log_id(self): return self._fund_log_id @fund_log_id.setter def fund_log_id(self, value): self._fund_log_id = value @property def", "params['source'] = self.source.to_alipay_dict() else: params['source'] = self.source if self.status: if hasattr(self.status, 'to_alipay_dict'): params['status']", "if 'bsn_no' in d: o.bsn_no = d['bsn_no'] if 'bsn_ref_no' in d: o.bsn_ref_no =", "self.gl_exchange_rate.to_alipay_dict() else: params['gl_exchange_rate'] = self.gl_exchange_rate if self.gmt_create: if hasattr(self.gmt_create, 'to_alipay_dict'): params['gmt_create'] = self.gmt_create.to_alipay_dict()", "d['fund_log_id'] if 'gl_exchange_rate' in d: o.gl_exchange_rate = d['gl_exchange_rate'] if 'gmt_create' in d: o.gmt_create", "def bsn_ref_no(self): return self._bsn_ref_no @bsn_ref_no.setter def bsn_ref_no(self, value): self._bsn_ref_no = value @property def", "'to_alipay_dict'): params['collected_amt'] = self.collected_amt.to_alipay_dict() else: params['collected_amt'] = self.collected_amt if self.creator: if hasattr(self.creator, 'to_alipay_dict'):", "params['bsn_ref_no'] = self.bsn_ref_no.to_alipay_dict() else: params['bsn_ref_no'] = self.bsn_ref_no if self.business_scene: if hasattr(self.business_scene, 'to_alipay_dict'): params['business_scene']", "if 'fund_log_id' in d: o.fund_log_id = d['fund_log_id'] if 'gl_exchange_rate' in d: o.gl_exchange_rate =", "value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt = value else: self._collect_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def", "self._business_scene = None self._channel = None self._channel_log_no = None self._channel_memo = None self._collect_amt", "else: params['source'] = self.source if self.status: if hasattr(self.status, 'to_alipay_dict'): params['status'] = self.status.to_alipay_dict() else:", "MultiCurrencyMoneyOpenApi): self._collected_amt = value else: self._collected_amt = MultiCurrencyMoneyOpenApi.from_alipay_dict(value) @property def 
creator(self): return self._creator", "@gl_exchange_rate.setter def gl_exchange_rate(self, value): self._gl_exchange_rate = value @property def gmt_create(self): return self._gmt_create @gmt_create.setter", "else: params['payee_inst_id'] = self.payee_inst_id if self.payee_ip_role_id: if hasattr(self.payee_ip_role_id, 'to_alipay_dict'): params['payee_ip_role_id'] = self.payee_ip_role_id.to_alipay_dict() else:", "d['payer_inst_id'] if 'payer_ip_role_id' in d: o.payer_ip_role_id = d['payer_ip_role_id'] if 'receipt_no' in d: o.receipt_no", "d: o.used_amt = d['used_amt'] if 'writeoff_relative_id' in d: o.writeoff_relative_id = d['writeoff_relative_id'] return o", "else: params['collected_amt'] = self.collected_amt if self.creator: if hasattr(self.creator, 'to_alipay_dict'): params['creator'] = self.creator.to_alipay_dict() else:", "if 'payer_account_name' in d: o.payer_account_name = d['payer_account_name'] if 'payer_account_no' in d: o.payer_account_no =", "d['payer_ip_role_id'] if 'receipt_no' in d: o.receipt_no = d['receipt_no'] if 'ref_trans_no' in d: o.ref_trans_no", "self._collect_amt @collect_amt.setter def collect_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collect_amt = value else: self._collect_amt", "params['gmt_modified'] = self.gmt_modified if self.payee_account_name: if hasattr(self.payee_account_name, 'to_alipay_dict'): params['payee_account_name'] = self.payee_account_name.to_alipay_dict() else: params['payee_account_name']", "hasattr(self.collect_status, 'to_alipay_dict'): params['collect_status'] = self.collect_status.to_alipay_dict() else: params['collect_status'] = self.collect_status if self.collected_amt: if hasattr(self.collected_amt,", "None self._gmt_create = None self._gmt_modified = None self._payee_account_name = None self._payee_account_no = None", "= value @property def payee_inst_id(self): return self._payee_inst_id @payee_inst_id.setter def payee_inst_id(self, value): self._payee_inst_id =", "o.channel_log_no 
= d['channel_log_no'] if 'channel_memo' in d: o.channel_memo = d['channel_memo'] if 'collect_amt' in", "d: o.payer_account_name = d['payer_account_name'] if 'payer_account_no' in d: o.payer_account_no = d['payer_account_no'] if 'payer_bank_branch_name'", "self.payer_inst_id.to_alipay_dict() else: params['payer_inst_id'] = self.payer_inst_id if self.payer_ip_role_id: if hasattr(self.payer_ip_role_id, 'to_alipay_dict'): params['payer_ip_role_id'] = self.payer_ip_role_id.to_alipay_dict()", "= d['channel'] if 'channel_log_no' in d: o.channel_log_no = d['channel_log_no'] if 'channel_memo' in d:", "value @property def channel(self): return self._channel @channel.setter def channel(self, value): self._channel = value", "self.ref_trans_no_type if self.source: if hasattr(self.source, 'to_alipay_dict'): params['source'] = self.source.to_alipay_dict() else: params['source'] = self.source", "alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi from alipay.aop.api.domain.MultiCurrencyMoneyOpenApi import MultiCurrencyMoneyOpenApi class CollectReceiptOpenApiDTO(object): def __init__(self): self._bsn_no =", "@property def collected_amt(self): return self._collected_amt @collected_amt.setter def collected_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi): self._collected_amt", "self._gmt_modified @gmt_modified.setter def gmt_modified(self, value): self._gmt_modified = value @property def payee_account_name(self): return self._payee_account_name", "value @property def collect_amt(self): return self._collect_amt @collect_amt.setter def collect_amt(self, value): if isinstance(value, MultiCurrencyMoneyOpenApi):", "self._collect_date = value @property def collect_status(self): return self._collect_status @collect_status.setter def collect_status(self, value): self._collect_status" ]
[ "import front.models class Migration(migrations.Migration): dependencies = [ ('front', '0002_images_filename'), ] operations = [", "Generated by Django 2.1.4 on 2019-03-18 22:00 from django.db import migrations, models import", "2019-03-18 22:00 from django.db import migrations, models import front.models class Migration(migrations.Migration): dependencies =", "Django 2.1.4 on 2019-03-18 22:00 from django.db import migrations, models import front.models class", "dependencies = [ ('front', '0002_images_filename'), ] operations = [ migrations.AlterField( model_name='images', name='ifile', field=models.ImageField(unique=True,", "by Django 2.1.4 on 2019-03-18 22:00 from django.db import migrations, models import front.models", "('front', '0002_images_filename'), ] operations = [ migrations.AlterField( model_name='images', name='ifile', field=models.ImageField(unique=True, upload_to=front.models.upld_dir), ), ]", "migrations, models import front.models class Migration(migrations.Migration): dependencies = [ ('front', '0002_images_filename'), ] operations", "2.1.4 on 2019-03-18 22:00 from django.db import migrations, models import front.models class Migration(migrations.Migration):", "class Migration(migrations.Migration): dependencies = [ ('front', '0002_images_filename'), ] operations = [ migrations.AlterField( model_name='images',", "import migrations, models import front.models class Migration(migrations.Migration): dependencies = [ ('front', '0002_images_filename'), ]", "front.models class Migration(migrations.Migration): dependencies = [ ('front', '0002_images_filename'), ] operations = [ migrations.AlterField(", "django.db import migrations, models import front.models class Migration(migrations.Migration): dependencies = [ ('front', '0002_images_filename'),", "on 2019-03-18 22:00 from django.db import migrations, models import front.models class Migration(migrations.Migration): dependencies", "= [ ('front', '0002_images_filename'), ] operations = [ 
migrations.AlterField( model_name='images', name='ifile', field=models.ImageField(unique=True, upload_to=front.models.upld_dir),", "[ ('front', '0002_images_filename'), ] operations = [ migrations.AlterField( model_name='images', name='ifile', field=models.ImageField(unique=True, upload_to=front.models.upld_dir), ),", "models import front.models class Migration(migrations.Migration): dependencies = [ ('front', '0002_images_filename'), ] operations =", "# Generated by Django 2.1.4 on 2019-03-18 22:00 from django.db import migrations, models", "22:00 from django.db import migrations, models import front.models class Migration(migrations.Migration): dependencies = [", "Migration(migrations.Migration): dependencies = [ ('front', '0002_images_filename'), ] operations = [ migrations.AlterField( model_name='images', name='ifile',", "from django.db import migrations, models import front.models class Migration(migrations.Migration): dependencies = [ ('front'," ]
[ "loss's scalar and histogram summary :type var: tensorflow.Variable :param var: variable to summary", "mean_summary(var): \"\"\"mean scalar summary :type var: tensorflow.Variable :param var: variable to add summary", "tf.summary.histogram('histogram', var) def max_summary(var): \"\"\"max scalar summary :type var: tensorflow.Variable :param var: variable", "mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) tf.summary.histogram('histogram', var) def summary_image(var, max_outputs=0): \"\"\"image summary :type", "max_summary(var): \"\"\"max scalar summary :type var: tensorflow.Variable :param var: variable to add summary", "to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) def stddev_summary(var):", "\"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\", stddev)", "\"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var): \"\"\"min summary :type var: tensorflow.Variable :param", "var: variable to summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) tf.summary.histogram('histogram',", "mean) def stddev_summary(var): \"\"\"stddev scalar summary :type var: tensorflow.Variable :param var: variable to", "add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean)))", "def mean_summary(var): \"\"\"mean scalar summary :type var: tensorflow.Variable :param var: variable to add", "tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) def stddev_summary(var): \"\"\"stddev scalar summary :type var:", "with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) 
tf.summary.histogram('histogram', var) def summary_image(var, max_outputs=0): \"\"\"image", ":type max_outputs: int :param var: variable to summary :param max_outputs: max output to", "to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var -", "to summary :param max_outputs: max output to summary image \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.image(\"image\",", "mean))) tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var): \"\"\"histogram summary :type var: tensorflow.Variable :param var: variable", "def stddev_summary(var): \"\"\"stddev scalar summary :type var: tensorflow.Variable :param var: variable to add", "stddev_summary(var): \"\"\"stddev scalar summary :type var: tensorflow.Variable :param var: variable to add summary", "\"\"\"histogram summary :type var: tensorflow.Variable :param var: variable to add summary \"\"\" with", "max_outputs=0): \"\"\"image summary :type var: tensorflow.Variable :type max_outputs: int :param var: variable to", "tf.reduce_min(var)) def summary_loss(var): \"\"\"loss summary loss's scalar and histogram summary :type var: tensorflow.Variable", "var) def max_summary(var): \"\"\"max scalar summary :type var: tensorflow.Variable :param var: variable to", "histogram_summary(var): \"\"\"histogram summary :type var: tensorflow.Variable :param var: variable to add summary \"\"\"", "\"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def max_summary(var): \"\"\"max scalar summary :type var: tensorflow.Variable", "summary_image(var, max_outputs=0): \"\"\"image summary :type var: tensorflow.Variable :type max_outputs: int :param var: variable", "tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var): \"\"\"min summary :type var: tensorflow.Variable :param var: variable", "variable to add summary \"\"\" with 
tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def max_summary(var): \"\"\"max scalar", "var) def summary_image(var, max_outputs=0): \"\"\"image summary :type var: tensorflow.Variable :type max_outputs: int :param", "\"\"\"min summary :type var: tensorflow.Variable :param var: variable to add summary \"\"\" with", ":param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean',", "tensorflow.Variable :type max_outputs: int :param var: variable to summary :param max_outputs: max output", "summary :type var: tensorflow.Variable :type max_outputs: int :param var: variable to summary :param", "= tf.reduce_mean(var) tf.summary.scalar('mean', mean) tf.summary.histogram('histogram', var) def summary_image(var, max_outputs=0): \"\"\"image summary :type var:", "\"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) def stddev_summary(var): \"\"\"stddev scalar summary", "to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var): \"\"\"loss summary loss's", "def max_summary(var): \"\"\"max scalar summary :type var: tensorflow.Variable :param var: variable to add", "with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) def stddev_summary(var): \"\"\"stddev scalar summary :type", "mean) tf.summary.histogram('histogram', var) def summary_image(var, max_outputs=0): \"\"\"image summary :type var: tensorflow.Variable :type max_outputs:", "tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def max_summary(var): \"\"\"max scalar summary :type var: tensorflow.Variable :param var:", "tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def", 
"tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var): \"\"\"min summary :type var: tensorflow.Variable :param var: variable to", "variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var): \"\"\"loss summary", "\"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var): \"\"\"loss summary loss's scalar and histogram", "summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var): \"\"\"loss summary loss's scalar and", "tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var): \"\"\"histogram summary :type var: tensorflow.Variable :param", "var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean)", "summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var): \"\"\"min summary :type var: tensorflow.Variable", "var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var): \"\"\"loss", "tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var): \"\"\"loss summary loss's scalar and histogram summary :type var:", "tf.summary.scalar('mean', mean) tf.summary.histogram('histogram', var) def summary_image(var, max_outputs=0): \"\"\"image summary :type var: tensorflow.Variable :type", "\"\"\"image summary :type var: tensorflow.Variable :type max_outputs: int :param var: variable to summary", "- mean))) tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var): \"\"\"histogram summary :type var: tensorflow.Variable :param var:", ":param max_outputs: max output to summary image \"\"\" with tf.name_scope(var.name.split(\":\")[0]): 
tf.summary.image(\"image\", var, max_outputs=max_outputs)", "tf.reduce_max(var)) def min_summary(var): \"\"\"min summary :type var: tensorflow.Variable :param var: variable to add", "var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var): \"\"\"min", "stddev) def histogram_summary(var): \"\"\"histogram summary :type var: tensorflow.Variable :param var: variable to add", "with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var): \"\"\"min summary :type var: tensorflow.Variable :param var:", "tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var): \"\"\"loss summary loss's scalar and histogram summary :type", "def summary_loss(var): \"\"\"loss summary loss's scalar and histogram summary :type var: tensorflow.Variable :param", ":type var: tensorflow.Variable :type max_outputs: int :param var: variable to summary :param max_outputs:", "summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) def stddev_summary(var): \"\"\"stddev scalar", ":param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var):", "tf.reduce_mean(var) tf.summary.scalar('mean', mean) tf.summary.histogram('histogram', var) def summary_image(var, max_outputs=0): \"\"\"image summary :type var: tensorflow.Variable", "histogram summary :type var: tensorflow.Variable :param var: variable to summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]):", "variable to summary :param max_outputs: max output to summary image \"\"\" with tf.name_scope(var.name.split(\":\")[0]):", "summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def max_summary(var): \"\"\"max scalar summary :type var:", ":param var: 
variable to summary :param max_outputs: max output to summary image \"\"\"", "add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var): \"\"\"loss summary loss's scalar", "tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var): \"\"\"histogram summary :type var: tensorflow.Variable :param var: variable to", "= tf.reduce_mean(var) tf.summary.scalar('mean', mean) def stddev_summary(var): \"\"\"stddev scalar summary :type var: tensorflow.Variable :param", "with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def summary_loss(var): \"\"\"loss summary loss's scalar and histogram summary", "tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) tf.summary.histogram('histogram', var) def summary_image(var, max_outputs=0): \"\"\"image summary", "and histogram summary :type var: tensorflow.Variable :param var: variable to summary \"\"\" with", "tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var)", "summary_loss(var): \"\"\"loss summary loss's scalar and histogram summary :type var: tensorflow.Variable :param var:", ":type var: tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\",", "summary :param max_outputs: max output to summary image \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.image(\"image\", var,", "tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var): \"\"\"histogram summary :type", "tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var):", "var: tensorflow.Variable :param var: variable to add 
summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var))", "var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) stddev =", "tensorflow as tf def mean_summary(var): \"\"\"mean scalar summary :type var: tensorflow.Variable :param var:", "stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var): \"\"\"histogram summary :type var:", "tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var)) def", "= tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var): \"\"\"histogram summary :type var: tensorflow.Variable", "mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) def stddev_summary(var): \"\"\"stddev scalar summary :type var: tensorflow.Variable", ":param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def max_summary(var):", "tensorflow.Variable :param var: variable to summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean',", "= tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var): \"\"\"histogram summary", "add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var): \"\"\"min summary :type var:", "to summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) tf.summary.histogram('histogram', var) def", "to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var): \"\"\"min summary :type", "var: tensorflow.Variable 
:param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean =", "summary :type var: tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]):", "with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def max_summary(var): \"\"\"max scalar summary :type var: tensorflow.Variable :param", "def histogram_summary(var): \"\"\"histogram summary :type var: tensorflow.Variable :param var: variable to add summary", "min_summary(var): \"\"\"min summary :type var: tensorflow.Variable :param var: variable to add summary \"\"\"", ":type var: tensorflow.Variable :param var: variable to summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean =", "\"\"\"max scalar summary :type var: tensorflow.Variable :param var: variable to add summary \"\"\"", "to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def max_summary(var): \"\"\"max scalar summary", ":param var: variable to summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean)", "max_outputs: int :param var: variable to summary :param max_outputs: max output to summary", "scalar and histogram summary :type var: tensorflow.Variable :param var: variable to summary \"\"\"", "add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def max_summary(var): \"\"\"max scalar summary :type", ":type var: tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram',", "summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\",", "var: variable to summary :param max_outputs: max output to summary image \"\"\" with", "var: tensorflow.Variable :type max_outputs: int 
:param var: variable to summary :param max_outputs: max", "tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def", "\"\"\"mean scalar summary :type var: tensorflow.Variable :param var: variable to add summary \"\"\"", "var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var) def max_summary(var): \"\"\"max", "tf def mean_summary(var): \"\"\"mean scalar summary :type var: tensorflow.Variable :param var: variable to", "with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\", stddev) def", "summary loss's scalar and histogram summary :type var: tensorflow.Variable :param var: variable to", "tf.summary.scalar('mean', mean) def stddev_summary(var): \"\"\"stddev scalar summary :type var: tensorflow.Variable :param var: variable", "tf.reduce_mean(var) tf.summary.scalar('mean', mean) def stddev_summary(var): \"\"\"stddev scalar summary :type var: tensorflow.Variable :param var:", "var: tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\", tf.reduce_min(var))", "variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var", "var: tensorflow.Variable :param var: variable to summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var)", "summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) tf.summary.histogram('histogram', var) def summary_image(var,", "summary :type var: tensorflow.Variable :param var: variable to summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean", "\"\"\"loss summary loss's scalar and histogram summary :type var: 
tensorflow.Variable :param var: variable", "\"\"\"tensorflow summary util\"\"\" import tensorflow as tf def mean_summary(var): \"\"\"mean scalar summary :type", "\"\"\"stddev scalar summary :type var: tensorflow.Variable :param var: variable to add summary \"\"\"", ":type var: tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean", "variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) def", "def min_summary(var): \"\"\"min summary :type var: tensorflow.Variable :param var: variable to add summary", "def summary_image(var, max_outputs=0): \"\"\"image summary :type var: tensorflow.Variable :type max_outputs: int :param var:", "scalar summary :type var: tensorflow.Variable :param var: variable to add summary \"\"\" with", ":type var: tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"min\",", "import tensorflow as tf def mean_summary(var): \"\"\"mean scalar summary :type var: tensorflow.Variable :param", "variable to summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) tf.summary.histogram('histogram', var)", "var: tensorflow.Variable :param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.histogram('histogram', var)", "as tf def mean_summary(var): \"\"\"mean scalar summary :type var: tensorflow.Variable :param var: variable", "variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var): \"\"\"min summary", "int :param var: variable to summary :param max_outputs: max output to summary image", ":param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) stddev", "add summary \"\"\" with 
tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) def stddev_summary(var): \"\"\"stddev", "summary util\"\"\" import tensorflow as tf def mean_summary(var): \"\"\"mean scalar summary :type var:", "\"\"\" with tf.name_scope(var.name.split(\":\")[0]): mean = tf.reduce_mean(var) tf.summary.scalar('mean', mean) tf.summary.histogram('histogram', var) def summary_image(var, max_outputs=0):", ":param var: variable to add summary \"\"\" with tf.name_scope(var.name.split(\":\")[0]): tf.summary.scalar(\"max\", tf.reduce_max(var)) def min_summary(var):", "util\"\"\" import tensorflow as tf def mean_summary(var): \"\"\"mean scalar summary :type var: tensorflow.Variable", "tf.summary.histogram('histogram', var) def summary_image(var, max_outputs=0): \"\"\"image summary :type var: tensorflow.Variable :type max_outputs: int", "mean = tf.reduce_mean(var) stddev = tf.sqrt(tf.reduce_mean(tf.square(var - mean))) tf.summary.scalar(\"stddev\", stddev) def histogram_summary(var): \"\"\"histogram" ]
[ "datetime armstrong = datetime(1969, 7, 21, 14, 56, 15) armstrong.date() # datetime.date(1969, 7,", "14, 56, 15) armstrong.date() # datetime.date(1969, 7, 21) armstrong.time() # datetime.time(14, 56, 15)", "armstrong.time() # datetime.time(14, 56, 15) armstrong.weekday() # 0 # in US week starts", "56, 15) armstrong.date() # datetime.date(1969, 7, 21) armstrong.time() # datetime.time(14, 56, 15) armstrong.weekday()", "armstrong.date() # datetime.date(1969, 7, 21) armstrong.time() # datetime.time(14, 56, 15) armstrong.weekday() # 0", "# datetime.time(14, 56, 15) armstrong.weekday() # 0 # in US week starts with", "15) armstrong.date() # datetime.date(1969, 7, 21) armstrong.time() # datetime.time(14, 56, 15) armstrong.weekday() #", "21, 14, 56, 15) armstrong.date() # datetime.date(1969, 7, 21) armstrong.time() # datetime.time(14, 56,", "= datetime(1969, 7, 21, 14, 56, 15) armstrong.date() # datetime.date(1969, 7, 21) armstrong.time()", "7, 21, 14, 56, 15) armstrong.date() # datetime.date(1969, 7, 21) armstrong.time() # datetime.time(14,", "# datetime.date(1969, 7, 21) armstrong.time() # datetime.time(14, 56, 15) armstrong.weekday() # 0 #", "armstrong = datetime(1969, 7, 21, 14, 56, 15) armstrong.date() # datetime.date(1969, 7, 21)", "21) armstrong.time() # datetime.time(14, 56, 15) armstrong.weekday() # 0 # in US week", "from datetime import datetime armstrong = datetime(1969, 7, 21, 14, 56, 15) armstrong.date()", "datetime.time(14, 56, 15) armstrong.weekday() # 0 # in US week starts with Sunday", "7, 21) armstrong.time() # datetime.time(14, 56, 15) armstrong.weekday() # 0 # in US", "import datetime armstrong = datetime(1969, 7, 21, 14, 56, 15) armstrong.date() # datetime.date(1969,", "datetime(1969, 7, 21, 14, 56, 15) armstrong.date() # datetime.date(1969, 7, 21) armstrong.time() #", "datetime import datetime armstrong = datetime(1969, 7, 21, 14, 56, 15) armstrong.date() #", "datetime.date(1969, 7, 21) armstrong.time() # datetime.time(14, 56, 15) armstrong.weekday() 
# 0 # in" ]
[ "import csv def readfile(filepath, fmt='csv'): with open(filepath, 'r') as f: data = csv.reader(f,", "# -*- coding: utf-8 -*- \"\"\"Read polarity. \"\"\" import csv def readfile(filepath, fmt='csv'):", "coding: utf-8 -*- \"\"\"Read polarity. \"\"\" import csv def readfile(filepath, fmt='csv'): with open(filepath,", "delimiter=',', skipinitialspace=True) next(data) r = {i[0]: int(i[1]) for i in data if not", "\"\"\"Read polarity. \"\"\" import csv def readfile(filepath, fmt='csv'): with open(filepath, 'r') as f:", "readfile(filepath, fmt='csv'): with open(filepath, 'r') as f: data = csv.reader(f, delimiter=',', skipinitialspace=True) next(data)", "f: data = csv.reader(f, delimiter=',', skipinitialspace=True) next(data) r = {i[0]: int(i[1]) for i", "with open(filepath, 'r') as f: data = csv.reader(f, delimiter=',', skipinitialspace=True) next(data) r =", "def readfile(filepath, fmt='csv'): with open(filepath, 'r') as f: data = csv.reader(f, delimiter=',', skipinitialspace=True)", "= csv.reader(f, delimiter=',', skipinitialspace=True) next(data) r = {i[0]: int(i[1]) for i in data", "utf-8 -*- \"\"\"Read polarity. \"\"\" import csv def readfile(filepath, fmt='csv'): with open(filepath, 'r')", "fmt='csv'): with open(filepath, 'r') as f: data = csv.reader(f, delimiter=',', skipinitialspace=True) next(data) r", "data = csv.reader(f, delimiter=',', skipinitialspace=True) next(data) r = {i[0]: int(i[1]) for i in", "csv.reader(f, delimiter=',', skipinitialspace=True) next(data) r = {i[0]: int(i[1]) for i in data if", "as f: data = csv.reader(f, delimiter=',', skipinitialspace=True) next(data) r = {i[0]: int(i[1]) for", "polarity. 
\"\"\" import csv def readfile(filepath, fmt='csv'): with open(filepath, 'r') as f: data", "open(filepath, 'r') as f: data = csv.reader(f, delimiter=',', skipinitialspace=True) next(data) r = {i[0]:", "'r') as f: data = csv.reader(f, delimiter=',', skipinitialspace=True) next(data) r = {i[0]: int(i[1])", "csv def readfile(filepath, fmt='csv'): with open(filepath, 'r') as f: data = csv.reader(f, delimiter=',',", "next(data) r = {i[0]: int(i[1]) for i in data if not i[0].startswith(\"#\")} return", "skipinitialspace=True) next(data) r = {i[0]: int(i[1]) for i in data if not i[0].startswith(\"#\")}", "r = {i[0]: int(i[1]) for i in data if not i[0].startswith(\"#\")} return r", "\"\"\" import csv def readfile(filepath, fmt='csv'): with open(filepath, 'r') as f: data =", "-*- \"\"\"Read polarity. \"\"\" import csv def readfile(filepath, fmt='csv'): with open(filepath, 'r') as", "-*- coding: utf-8 -*- \"\"\"Read polarity. \"\"\" import csv def readfile(filepath, fmt='csv'): with" ]
[ "Jedi, Tests, Questions # Register your models here. admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline): model", "admin.site.register(Jedi) class TestsInline(admin.StackedInline): model = Questions extra = 0 @admin.register(Tests) class QuestionsAdmin(admin.ModelAdmin): inlines", "your models here. admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline): model = Questions extra = 0", "from django.contrib import admin from .models import Planet, Jedi, Tests, Questions # Register", "django.contrib import admin from .models import Planet, Jedi, Tests, Questions # Register your", "admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline): model = Questions extra = 0 @admin.register(Tests) class QuestionsAdmin(admin.ModelAdmin):", "TestsInline(admin.StackedInline): model = Questions extra = 0 @admin.register(Tests) class QuestionsAdmin(admin.ModelAdmin): inlines = [", "admin from .models import Planet, Jedi, Tests, Questions # Register your models here.", "import admin from .models import Planet, Jedi, Tests, Questions # Register your models", "Questions # Register your models here. admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline): model = Questions", "Tests, Questions # Register your models here. admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline): model =", "model = Questions extra = 0 @admin.register(Tests) class QuestionsAdmin(admin.ModelAdmin): inlines = [ TestsInline,", ".models import Planet, Jedi, Tests, Questions # Register your models here. admin.site.register(Planet) admin.site.register(Jedi)", "import Planet, Jedi, Tests, Questions # Register your models here. 
admin.site.register(Planet) admin.site.register(Jedi) class", "class TestsInline(admin.StackedInline): model = Questions extra = 0 @admin.register(Tests) class QuestionsAdmin(admin.ModelAdmin): inlines =", "Register your models here. admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline): model = Questions extra =", "models here. admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline): model = Questions extra = 0 @admin.register(Tests)", "= Questions extra = 0 @admin.register(Tests) class QuestionsAdmin(admin.ModelAdmin): inlines = [ TestsInline, ]", "Planet, Jedi, Tests, Questions # Register your models here. admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline):", "here. admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline): model = Questions extra = 0 @admin.register(Tests) class", "# Register your models here. admin.site.register(Planet) admin.site.register(Jedi) class TestsInline(admin.StackedInline): model = Questions extra", "from .models import Planet, Jedi, Tests, Questions # Register your models here. admin.site.register(Planet)" ]
[ "Solution(object): def nthSuperUglyNumber(self, n, primes): \"\"\" :type n: int :type primes: List[int] :rtype:", "List[int] :rtype: int \"\"\" res = [1] hashmap = {val:0 for val in", "[float('inf')] * len(primes) while len(res) < n: newm = [res[hashmap[p]] * p for", ":type primes: List[int] :rtype: int \"\"\" res = [1] hashmap = {val:0 for", "{val:0 for val in primes} m = [float('inf')] * len(primes) while len(res) <", "newm = [res[hashmap[p]] * p for p in primes] mn = min(newm) hashmap[primes[newm.index(mn)]]", "= min(newm) hashmap[primes[newm.index(mn)]] += 1 if mn not in res: res.append(mn) else: continue", "primes: List[int] :rtype: int \"\"\" res = [1] hashmap = {val:0 for val", "not in res: res.append(mn) else: continue return res[-1] a = Solution() print a.nthSuperUglyNumber(12,", "= [float('inf')] * len(primes) while len(res) < n: newm = [res[hashmap[p]] * p", "while len(res) < n: newm = [res[hashmap[p]] * p for p in primes]", "* p for p in primes] mn = min(newm) hashmap[primes[newm.index(mn)]] += 1 if", "int \"\"\" res = [1] hashmap = {val:0 for val in primes} m", "mn not in res: res.append(mn) else: continue return res[-1] a = Solution() print", "p for p in primes] mn = min(newm) hashmap[primes[newm.index(mn)]] += 1 if mn", "n: int :type primes: List[int] :rtype: int \"\"\" res = [1] hashmap =", "res: res.append(mn) else: continue return res[-1] a = Solution() print a.nthSuperUglyNumber(12, [2, 7,", "n, primes): \"\"\" :type n: int :type primes: List[int] :rtype: int \"\"\" res", "= [1] hashmap = {val:0 for val in primes} m = [float('inf')] *", "in primes] mn = min(newm) hashmap[primes[newm.index(mn)]] += 1 if mn not in res:", "int :type primes: List[int] :rtype: int \"\"\" res = [1] hashmap = {val:0", "p in primes] mn = min(newm) hashmap[primes[newm.index(mn)]] += 1 if mn not in", "primes} m = [float('inf')] * len(primes) while len(res) < n: newm = [res[hashmap[p]]", "\"\"\" :type n: int :type primes: List[int] :rtype: int \"\"\" res = [1]", "hashmap 
= {val:0 for val in primes} m = [float('inf')] * len(primes) while", "< n: newm = [res[hashmap[p]] * p for p in primes] mn =", "[res[hashmap[p]] * p for p in primes] mn = min(newm) hashmap[primes[newm.index(mn)]] += 1", "m = [float('inf')] * len(primes) while len(res) < n: newm = [res[hashmap[p]] *", "res.append(mn) else: continue return res[-1] a = Solution() print a.nthSuperUglyNumber(12, [2, 7, 13,", "len(primes) while len(res) < n: newm = [res[hashmap[p]] * p for p in", "in primes} m = [float('inf')] * len(primes) while len(res) < n: newm =", ":rtype: int \"\"\" res = [1] hashmap = {val:0 for val in primes}", "1 if mn not in res: res.append(mn) else: continue return res[-1] a =", "hashmap[primes[newm.index(mn)]] += 1 if mn not in res: res.append(mn) else: continue return res[-1]", "= [res[hashmap[p]] * p for p in primes] mn = min(newm) hashmap[primes[newm.index(mn)]] +=", "in res: res.append(mn) else: continue return res[-1] a = Solution() print a.nthSuperUglyNumber(12, [2,", "for p in primes] mn = min(newm) hashmap[primes[newm.index(mn)]] += 1 if mn not", "class Solution(object): def nthSuperUglyNumber(self, n, primes): \"\"\" :type n: int :type primes: List[int]", "for val in primes} m = [float('inf')] * len(primes) while len(res) < n:", "val in primes} m = [float('inf')] * len(primes) while len(res) < n: newm", "if mn not in res: res.append(mn) else: continue return res[-1] a = Solution()", "* len(primes) while len(res) < n: newm = [res[hashmap[p]] * p for p", "= {val:0 for val in primes} m = [float('inf')] * len(primes) while len(res)", "mn = min(newm) hashmap[primes[newm.index(mn)]] += 1 if mn not in res: res.append(mn) else:", "min(newm) hashmap[primes[newm.index(mn)]] += 1 if mn not in res: res.append(mn) else: continue return", "def nthSuperUglyNumber(self, n, primes): \"\"\" :type n: int :type primes: List[int] :rtype: int", ":type n: int :type primes: List[int] :rtype: int \"\"\" res = [1] hashmap", "n: newm = [res[hashmap[p]] * p for p in primes] mn 
= min(newm)", "primes): \"\"\" :type n: int :type primes: List[int] :rtype: int \"\"\" res =", "len(res) < n: newm = [res[hashmap[p]] * p for p in primes] mn", "primes] mn = min(newm) hashmap[primes[newm.index(mn)]] += 1 if mn not in res: res.append(mn)", "else: continue return res[-1] a = Solution() print a.nthSuperUglyNumber(12, [2, 7, 13, 19])", "+= 1 if mn not in res: res.append(mn) else: continue return res[-1] a", "nthSuperUglyNumber(self, n, primes): \"\"\" :type n: int :type primes: List[int] :rtype: int \"\"\"", "res = [1] hashmap = {val:0 for val in primes} m = [float('inf')]", "\"\"\" res = [1] hashmap = {val:0 for val in primes} m =", "[1] hashmap = {val:0 for val in primes} m = [float('inf')] * len(primes)" ]
[ "- start_time # timeit statement print('Execution time: {0:.4f} sec'.format(elapsed)) # %% plotting section", "attribute why descriptions with why codes from funcWhyID import funcWhyID [df1, whyID, whyIDsum]", "header=0) >>>>>>> Alex1 # filter dataframe zero (raw NHTS2009) to columns listed in", "as pd import timeit # initialize values start_time = timeit.default_timer() <<<<<<< HEAD #", "funcWhyID [df1, whyID, whyIDsum] = funcWhyID(df1, whyID, whyIDsum) whyIDsumList = set(df1['whyDescSmry']) # build", "# NHTS2009 Data Location for Alex's Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) #", "= pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1 # filter dataframe zero", "from funcWhyID import funcWhyID [df1, whyID, whyIDsum] = funcWhyID(df1, whyID, whyIDsum) whyIDsumList =", "Alex's Lab Computer df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1", "import pandas as pd import timeit # initialize values start_time = timeit.default_timer() <<<<<<<", "# shows all column headers colNames1 = list(df1) # shows all column headers", "= df1.tail(5) # shows last n rows df0['TRIPPURP'].describe() # print data shapes (rows", "#df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009 Data Location for Alex's Lab Computer", "n rows lastNrows1 = df1.tail(5) # shows last n rows df0['TRIPPURP'].describe() # print", "elapsed = timeit.default_timer() - start_time # timeit statement print('Execution time: {0:.4f} sec'.format(elapsed)) #", "df1.shape) elapsed = timeit.default_timer() - start_time # timeit statement print('Execution time: {0:.4f} sec'.format(elapsed))", "columns listed in filter df1 = 
df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1)", "whyIDsum) whyIDsumList = set(df1['whyDescSmry']) # build out dataframe table colNames0 = list(df0) #", "# filter dataframe zero (raw NHTS2009) to columns listed in filter df1 =", "n rows firstNrows1 = df1.head(25) # shows first n rows lastNrows0 = df0.tail(5)", "pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= # NHTS2009 Data Location for Alex's Laptop #df0 =", "# shows first n rows lastNrows0 = df0.tail(5) # shows last n rows", "Data df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= # NHTS2009 Data Location for Alex's", "[df1, whyID, whyIDsum] = funcWhyID(df1, whyID, whyIDsum) whyIDsumList = set(df1['whyDescSmry']) # build out", "print data shapes (rows x columns) print('Dataframe Raw Shape:', df0.shape) print('Dataframe Filtered Shape:',", "headers firstNrows0 = df0.head(25) # shows first n rows firstNrows1 = df1.head(25) #", "shows first n rows lastNrows0 = df0.tail(5) # shows last n rows lastNrows1", "timeit # initialize values start_time = timeit.default_timer() <<<<<<< HEAD # Import NHTS2009 Data", "initialize values start_time = timeit.default_timer() <<<<<<< HEAD # Import NHTS2009 Data df0 =", "first n rows firstNrows1 = df1.head(25) # shows first n rows lastNrows0 =", "first n rows lastNrows0 = df0.tail(5) # shows last n rows lastNrows1 =", "= timeit.default_timer() - start_time # timeit statement print('Execution time: {0:.4f} sec'.format(elapsed)) # %%", "# shows first n rows firstNrows1 = df1.head(25) # shows first n rows", "======= # NHTS2009 
Data Location for Alex's Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0)", "# shows all column headers firstNrows0 = df0.head(25) # shows first n rows", "Created on Sun Feb 18 22:57:53 2018 @author: <NAME> \"\"\" # import libraries", "# initialize values start_time = timeit.default_timer() <<<<<<< HEAD # Import NHTS2009 Data df0", "Location for Alex's Lab Computer df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0)", "time: {0:.4f} sec'.format(elapsed)) # %% plotting section # plots histogram #plotHistSmry = df1['WHYTRP1S'].hist(bins=25)", "lastNrows0 = df0.tail(5) # shows last n rows lastNrows1 = df1.tail(5) # shows", "Computer df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1 # filter", "= df1.head(25) # shows first n rows lastNrows0 = df0.tail(5) # shows last", "Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009 Data Location for Alex's Lab Computer df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV',", "column headers colNames1 = list(df1) # shows all column headers firstNrows0 = df0.head(25)", "n rows df0['TRIPPURP'].describe() # print data shapes (rows x columns) print('Dataframe Raw Shape:',", "Feb 18 22:57:53 2018 @author: <NAME> \"\"\" # import libraries import pandas as", "= df0.head(25) # shows first n rows firstNrows1 = df1.head(25) # shows first", "plotting section # plots histogram #plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry = plt.pie(df1['WHYTRP1S']) #plotPieSmry =", "rows lastNrows1 = df1.tail(5) # shows last n rows df0['TRIPPURP'].describe() # print data", "libraries import pandas as pd import timeit # initialize values start_time = timeit.default_timer()", "NHTS2009 Data df0 = 
pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= # NHTS2009 Data Location for", "18 22:57:53 2018 @author: <NAME> \"\"\" # import libraries import pandas as pd", "Lab Computer df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1 #", "whyID, whyIDsum) whyIDsumList = set(df1['whyDescSmry']) # build out dataframe table colNames0 = list(df0)", "import libraries import pandas as pd import timeit # initialize values start_time =", "(raw NHTS2009) to columns listed in filter df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM',", "shows first n rows firstNrows1 = df1.head(25) # shows first n rows lastNrows0", "import timeit # initialize values start_time = timeit.default_timer() <<<<<<< HEAD # Import NHTS2009", "for Alex's Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009 Data Location for", "print('Execution time: {0:.4f} sec'.format(elapsed)) # %% plotting section # plots histogram #plotHistSmry =", "values start_time = timeit.default_timer() <<<<<<< HEAD # Import NHTS2009 Data df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140", "Location for Alex's Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009 Data Location", "on Sun Feb 18 22:57:53 2018 @author: <NAME> \"\"\" # import libraries import", "# NHTS2009 Data Location for Alex's Lab Computer df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0", "function call to attribute why descriptions with why codes from funcWhyID import funcWhyID", "# %% plotting section # plots histogram 
#plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry = plt.pie(df1['WHYTRP1S'])", "shows last n rows df0['TRIPPURP'].describe() # print data shapes (rows x columns) print('Dataframe", "= df1['WHYTRP1S'].hist(bins=25) #plotPieSmry = plt.pie(df1['WHYTRP1S']) #plotPieSmry = plt.pie(df1['WHYTRP1S'], labels=whyIDsumList, autopct='%1.0f%%) #plt.plot(\"whyDescSmry\",type=\"bar\") #df1[\"WHYFROM\"].plot(kind=\"bar\") #first5rows1['whyDescSmry'].hist()", "coding: utf-8 -*- \"\"\" Created on Sun Feb 18 22:57:53 2018 @author: <NAME>", "header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1 # filter dataframe zero (raw NHTS2009)", "filter df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function call", "{0:.4f} sec'.format(elapsed)) # %% plotting section # plots histogram #plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry", "filter dataframe zero (raw NHTS2009) to columns listed in filter df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP',", "import funcWhyID [df1, whyID, whyIDsum] = funcWhyID(df1, whyID, whyIDsum) whyIDsumList = set(df1['whyDescSmry']) #", "df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= # NHTS2009 Data Location for Alex's Laptop", "build out dataframe table colNames0 = list(df0) # shows all column headers colNames1", "Shape:', df0.shape) print('Dataframe Filtered Shape:', df1.shape) elapsed = timeit.default_timer() - start_time # timeit", "colNames0 = list(df0) # shows all column headers colNames1 = list(df1) # shows", 
"set(df1['whyDescSmry']) # build out dataframe table colNames0 = list(df0) # shows all column", "table colNames0 = list(df0) # shows all column headers colNames1 = list(df1) #", "columns) print('Dataframe Raw Shape:', df0.shape) print('Dataframe Filtered Shape:', df1.shape) elapsed = timeit.default_timer() -", "start_time # timeit statement print('Execution time: {0:.4f} sec'.format(elapsed)) # %% plotting section #", "header=0) ======= # NHTS2009 Data Location for Alex's Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV',", "Data Location for Alex's Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009 Data", "'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function call to attribute why descriptions with why codes", "n rows lastNrows0 = df0.tail(5) # shows last n rows lastNrows1 = df1.tail(5)", "<NAME> \"\"\" # import libraries import pandas as pd import timeit # initialize", "to attribute why descriptions with why codes from funcWhyID import funcWhyID [df1, whyID,", "start_time = timeit.default_timer() <<<<<<< HEAD # Import NHTS2009 Data df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV',", "zero (raw NHTS2009) to columns listed in filter df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM',", "all column headers firstNrows0 = df0.head(25) # shows first n rows firstNrows1 =", "df0['TRIPPURP'].describe() # print data shapes (rows x columns) print('Dataframe Raw Shape:', df0.shape) print('Dataframe", "2018 @author: <NAME> \"\"\" # import libraries import pandas as pd import timeit", "shows all column headers colNames1 = list(df1) # shows all column headers firstNrows0", "all column 
headers colNames1 = list(df1) # shows all column headers firstNrows0 =", "list(df0) # shows all column headers colNames1 = list(df1) # shows all column", "Shape:', df1.shape) elapsed = timeit.default_timer() - start_time # timeit statement print('Execution time: {0:.4f}", "NHTS2009) to columns listed in filter df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN',", "Import NHTS2009 Data df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= # NHTS2009 Data Location", "'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function call to attribute why descriptions", "\"\"\" # import libraries import pandas as pd import timeit # initialize values", "= set(df1['whyDescSmry']) # build out dataframe table colNames0 = list(df0) # shows all", "%% plotting section # plots histogram #plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry = plt.pie(df1['WHYTRP1S']) #plotPieSmry", "# function call to attribute why descriptions with why codes from funcWhyID import", "Data Location for Alex's Lab Computer df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV',", "Raw Shape:', df0.shape) print('Dataframe Filtered Shape:', df1.shape) elapsed = timeit.default_timer() - start_time #", "# build out dataframe table colNames0 = list(df0) # shows all column headers", "= 
funcWhyID(df1, whyID, whyIDsum) whyIDsumList = set(df1['whyDescSmry']) # build out dataframe table colNames0", "dataframe table colNames0 = list(df0) # shows all column headers colNames1 = list(df1)", "pd import timeit # initialize values start_time = timeit.default_timer() <<<<<<< HEAD # Import", "lastNrows1 = df1.tail(5) # shows last n rows df0['TRIPPURP'].describe() # print data shapes", "= pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= # NHTS2009 Data Location for Alex's Laptop #df0", "pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1 # filter dataframe zero (raw", "HEAD # Import NHTS2009 Data df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= # NHTS2009", "#plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry = plt.pie(df1['WHYTRP1S']) #plotPieSmry = plt.pie(df1['WHYTRP1S'], labels=whyIDsumList, autopct='%1.0f%%) #plt.plot(\"whyDescSmry\",type=\"bar\") #df1[\"WHYFROM\"].plot(kind=\"bar\")", "histogram #plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry = plt.pie(df1['WHYTRP1S']) #plotPieSmry = plt.pie(df1['WHYTRP1S'], labels=whyIDsumList, autopct='%1.0f%%) #plt.plot(\"whyDescSmry\",type=\"bar\")", "with why codes from funcWhyID import funcWhyID [df1, whyID, whyIDsum] = funcWhyID(df1, whyID,", "-*- \"\"\" Created on Sun Feb 18 22:57:53 2018 @author: <NAME> \"\"\" #", "codes from funcWhyID import funcWhyID [df1, whyID, whyIDsum] = funcWhyID(df1, whyID, whyIDsum) whyIDsumList", "shows all column headers firstNrows0 = df0.head(25) # shows first n rows firstNrows1", "utf-8 -*- \"\"\" Created on Sun Feb 18 22:57:53 2018 @author: <NAME> \"\"\"", "df1.tail(5) # shows last n rows df0['TRIPPURP'].describe() # print data shapes (rows x", "why descriptions with why codes from funcWhyID import funcWhyID [df1, whyID, whyIDsum] =", 
"listed in filter df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) #", "timeit.default_timer() - start_time # timeit statement print('Execution time: {0:.4f} sec'.format(elapsed)) # %% plotting", "print('Dataframe Filtered Shape:', df1.shape) elapsed = timeit.default_timer() - start_time # timeit statement print('Execution", "'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function call to attribute why descriptions with why", "= pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1 # filter dataframe zero (raw NHTS2009) to columns", "# print data shapes (rows x columns) print('Dataframe Raw Shape:', df0.shape) print('Dataframe Filtered", "Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= # NHTS2009 Data Location for Alex's Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140", "# Import NHTS2009 Data df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= # NHTS2009 Data", "= list(df1) # shows all column headers firstNrows0 = df0.head(25) # shows first", "column headers firstNrows0 = df0.head(25) # shows first n rows firstNrows1 = df1.head(25)", "'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function 
call to attribute why descriptions with", "in filter df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function", "df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function call to attribute why", "last n rows lastNrows1 = df1.tail(5) # shows last n rows df0['TRIPPURP'].describe() #", "NHTS2009 Data Location for Alex's Lab Computer df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 =", "section # plots histogram #plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry = plt.pie(df1['WHYTRP1S']) #plotPieSmry = plt.pie(df1['WHYTRP1S'],", "firstNrows0 = df0.head(25) # shows first n rows firstNrows1 = df1.head(25) # shows", "df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function call to", "df0.shape) print('Dataframe Filtered Shape:', df1.shape) elapsed = timeit.default_timer() - start_time # timeit statement", "shapes (rows x columns) 
print('Dataframe Raw Shape:', df0.shape) print('Dataframe Filtered Shape:', df1.shape) elapsed", "df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1 # filter dataframe", "timeit statement print('Execution time: {0:.4f} sec'.format(elapsed)) # %% plotting section # plots histogram", "Alex's Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009 Data Location for Alex's", "= list(df0) # shows all column headers colNames1 = list(df1) # shows all", "whyID, whyIDsum] = funcWhyID(df1, whyID, whyIDsum) whyIDsumList = set(df1['whyDescSmry']) # build out dataframe", "-*- coding: utf-8 -*- \"\"\" Created on Sun Feb 18 22:57:53 2018 @author:", "funcWhyID(df1, whyID, whyIDsum) whyIDsumList = set(df1['whyDescSmry']) # build out dataframe table colNames0 =", "list(df1) # shows all column headers firstNrows0 = df0.head(25) # shows first n", "Filtered Shape:', df1.shape) elapsed = timeit.default_timer() - start_time # timeit statement print('Execution time:", "dataframe zero (raw NHTS2009) to columns listed in filter df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE',", "rows lastNrows0 = df0.tail(5) # shows last n rows lastNrows1 = df1.tail(5) #", "firstNrows1 = df1.head(25) # shows first n rows lastNrows0 = df0.tail(5) # shows", "<<<<<<< HEAD # Import NHTS2009 Data df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) ======= #", "funcWhyID import funcWhyID [df1, whyID, whyIDsum] = funcWhyID(df1, whyID, whyIDsum) whyIDsumList = set(df1['whyDescSmry'])", "# import libraries import pandas as pd import timeit # initialize values start_time", "'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function call to attribute why descriptions 
with why codes from", "= df0.tail(5) # shows last n rows lastNrows1 = df1.tail(5) # shows last", "pandas as pd import timeit # initialize values start_time = timeit.default_timer() <<<<<<< HEAD", "# plots histogram #plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry = plt.pie(df1['WHYTRP1S']) #plotPieSmry = plt.pie(df1['WHYTRP1S'], labels=whyIDsumList,", "NHTS2009 Data Location for Alex's Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009", "plots histogram #plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry = plt.pie(df1['WHYTRP1S']) #plotPieSmry = plt.pie(df1['WHYTRP1S'], labels=whyIDsumList, autopct='%1.0f%%)", "whyIDsum] = funcWhyID(df1, whyID, whyIDsum) whyIDsumList = set(df1['whyDescSmry']) # build out dataframe table", "df0.head(25) # shows first n rows firstNrows1 = df1.head(25) # shows first n", "df0.tail(5) # shows last n rows lastNrows1 = df1.tail(5) # shows last n", "x columns) print('Dataframe Raw Shape:', df0.shape) print('Dataframe Filtered Shape:', df1.shape) elapsed = timeit.default_timer()", "shows last n rows lastNrows1 = df1.tail(5) # shows last n rows df0['TRIPPURP'].describe()", ">>>>>>> Alex1 # filter dataframe zero (raw NHTS2009) to columns listed in filter", "# timeit statement print('Execution time: {0:.4f} sec'.format(elapsed)) # %% plotting section # plots", "Laptop #df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009 Data Location for Alex's Lab", "pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1 # filter dataframe zero (raw NHTS2009) to columns listed", "to columns listed in filter df1 = df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 
'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'],", "call to attribute why descriptions with why codes from funcWhyID import funcWhyID [df1,", "axis=1) # function call to attribute why descriptions with why codes from funcWhyID", "# shows last n rows df0['TRIPPURP'].describe() # print data shapes (rows x columns)", "whyIDsumList = set(df1['whyDescSmry']) # build out dataframe table colNames0 = list(df0) # shows", "headers colNames1 = list(df1) # shows all column headers firstNrows0 = df0.head(25) #", "timeit.default_timer() <<<<<<< HEAD # Import NHTS2009 Data df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) =======", "df1.head(25) # shows first n rows lastNrows0 = df0.tail(5) # shows last n", "descriptions with why codes from funcWhyID import funcWhyID [df1, whyID, whyIDsum] = funcWhyID(df1,", "# -*- coding: utf-8 -*- \"\"\" Created on Sun Feb 18 22:57:53 2018", "last n rows df0['TRIPPURP'].describe() # print data shapes (rows x columns) print('Dataframe Raw", "why codes from funcWhyID import funcWhyID [df1, whyID, whyIDsum] = funcWhyID(df1, whyID, whyIDsum)", "data shapes (rows x columns) print('Dataframe Raw Shape:', df0.shape) print('Dataframe Filtered Shape:', df1.shape)", "= timeit.default_timer() <<<<<<< HEAD # Import NHTS2009 Data df0 = pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0)", "= df0.filter(['TDCASEID','TRAVDAY','STRTTIME','DWELTIME','ENDTIME','TRIPPURP', 'WHYFROM','WHYTO','WHYTRP1S','WHYTRP90','WHODROVE', 'CENSUS_D','CENSUS_R','DRIVER','AWAYHOME','FRSTHM','TDTRPNUM', 'TDWKND','TRPACCMP','TRPHHACC','TRVLCMIN','TRVL_MIN','TRWAITTM', 'VEHTYPE','VEHYEAR','VMT_MILE','HHFAMINC','HHSIZE','HHSTATE','HOMEOWN', 'NUMADLT','NUMONTRIP','PRMACT','PAYPROF','PROXY','PRMACT','R_AGE','R_SEX'], axis=1) # function call to attribute", "for Alex's Lab Computer df0 = 
pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0) #df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>>", "Alex1 # filter dataframe zero (raw NHTS2009) to columns listed in filter df1", "rows df0['TRIPPURP'].describe() # print data shapes (rows x columns) print('Dataframe Raw Shape:', df0.shape)", "sec'.format(elapsed)) # %% plotting section # plots histogram #plotHistSmry = df1['WHYTRP1S'].hist(bins=25) #plotPieSmry =", "header=0) # NHTS2009 Data Location for Alex's Lab Computer df0 = pd.read_csv(r'C:\\Users\\Alex\\Documents\\NHTS_2017\\trippub.CSV', header=0)", "statement print('Execution time: {0:.4f} sec'.format(elapsed)) # %% plotting section # plots histogram #plotHistSmry", "pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009 Data Location for Alex's Lab Computer df0 =", "\"\"\" Created on Sun Feb 18 22:57:53 2018 @author: <NAME> \"\"\" # import", "#df0 = pd.read_csv(r'C:\\Users\\<NAME>\\Documents\\NHTS_2017\\trippub.CSV', header=0) >>>>>>> Alex1 # filter dataframe zero (raw NHTS2009) to", "(rows x columns) print('Dataframe Raw Shape:', df0.shape) print('Dataframe Filtered Shape:', df1.shape) elapsed =", "@author: <NAME> \"\"\" # import libraries import pandas as pd import timeit #", "22:57:53 2018 @author: <NAME> \"\"\" # import libraries import pandas as pd import", "out dataframe table colNames0 = list(df0) # shows all column headers colNames1 =", "colNames1 = list(df1) # shows all column headers firstNrows0 = df0.head(25) # shows", "rows firstNrows1 = df1.head(25) # shows first n rows lastNrows0 = df0.tail(5) #", "print('Dataframe Raw Shape:', df0.shape) print('Dataframe Filtered Shape:', df1.shape) elapsed = timeit.default_timer() - start_time", "Sun Feb 18 22:57:53 2018 @author: <NAME> \"\"\" # import libraries import pandas", "= pd.read_csv(r'C:\\Users\\avi_b\\Box\\CS6140 Project\\Data\\CSV\\DAYV2PUB.CSV', header=0) # NHTS2009 Data 
Location for Alex's Lab Computer df0", "# shows last n rows lastNrows1 = df1.tail(5) # shows last n rows" ]
[ "log: If True, will log the API JSON response. This is optional as", "NotImplementedError(\"Tick range not supported by the Bittrex API.\") async def get_last_values(self, pair: str)", "False elif isinstance(ex, (SyntaxError, NameError)): reason = \"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry =", "and API response message (if present). Arguments: data: Dict of the parsed API", "value. Arguments: method: Name of the API method to call. params: Values of", "(SyntaxError, NameError)): reason = \"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry = False elif ex", "expr = 'lambda d: d' + item expr_func = eval(expr) # pylint: disable=W0123", "api_message = data['message'] else: api_message = 'empty or missing results' return \"{} ({}:", "async def get_balance(self, base: str): \"\"\" \"\"\" params = [base] results, status =", "retry_reason = 'status {}'.format(status) retry = True else: self.log.error('Got non-retryable status {}.', status)", "\"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True, retry_fail=True) if status !=", "containing: float: The current close price, or None if an error occurred. float:", "status code. A value of 0 indicates a connection or transport failure. Raises:", "'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries', 'params': '', 'auth': False }, 'getTicks': { 'path': 'v2.0/pub/market/getTicks',", "representing the dictionary paths of the response data items to extract, eg. [\"['result'][0]['C']\",", "\"\"\" params = [pair, self.tick_interval_str] results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['C']\", #", "or extraction error occurred. 
Exception: The last exception that occurred during extraction, or", "data) retry = False else: reason = None retry = False return (retry,", "the API. log: If True, will log the API JSON response. This is", "status != 200 or results is None or results[0] is None: self.log.error(\"Failed executing", "API method to call. params: Values of query parameters to pass to the", "data should be retried, false otherwise. Returns: (tuple): A tuple containing: (bool): True", "= await self.call_extract([ \"['result']['uuid']\", ], 'buyLimit', params=params, log=True) if status != 200 or", "reason from the given extraction exception and API response message (if present). Arguments:", "for a currency pair from the API. Arguments: pair: Currency pair name eg.", "pair: str, length: int=None) -> List[Dict[str, Any]]: \"\"\" Get ticks (closing values and", "List, Sequence, Tuple import api import utils import common import configuration import aiohttp", "return results[0] async def get_tick_range(self, pair: str, start_time: float, end_time: float) -> List[Dict[str,", "reason = await self._handle_extract_exception(ex, data, retry_data) if retry: attempt += 1 await common.backoff(attempt,", "be retried. retry_fail: If True, will perform backoff and retry on explicit failure", "(blank message)\" except KeyError: reason = \"success == false (missing message)\" if not", "import time import asyncio import hashlib import traceback from datetime import datetime, timezone", "= data['message'] else: api_message = 'empty or missing results' return \"{} ({}: {})\".format(api_message,", "results, status = await self.call_extract([ \"['result']['uuid']\", ], 'buyLimit', params=params, log=True) if status !=", "Bittrex API. Arguments: pair: The currency pair eg. 'BTC-ETH'. length: Not supported by", "None or results[0] is None: self.log.error(\"Failed executing buy order request: params {}, status", "utf-8 -*- \"\"\" Bittrex API module. 
\"\"\" __author__ = '<NAME> <$(echo nqnz.enshfr#tznvy.pbz |", "import common import configuration import aiohttp config = configuration.config \"\"\" Global configuration. \"\"\"", "Returns: (tuple): A tuple containing: (str): Full URL for the request. (dict): Dictionary", "to ensure that the specified extract dict keys are correct to avoid repeating", "the raw response body (may be None). On a 200 response with a", "nonce = int(time.time() * 1000) api_key = config['bittrex_api_key'] api_secret = config['bittrex_api_secret'] query =", "\"missing 'success' value\" retry = True if retry: attempt += 1 await common.backoff(attempt,", "from a dictionary of data. Arguments: extract: List of strings representing the dictionary", "a missing response body, None. status (int): The HTTP response status code. A", "specified extract dict keys are correct to avoid repeating of non-idempotent operations (such", "for delisting' in notice: self.log.info(\"{} marked as inactive due to pending removal.\", pair)", "'apikey={}&nonce={}&'.format(api_key, nonce) + query url = API_URL.format(API_METHODS[method]['path'], query) signature = hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest()", "Dict[str, Any]): \"\"\" Get the failure reason from the given extraction exception and", "faster lookups. This data is used for batching tick updates, since the v1", "not prev_day: prev_day = last if notice: self.log.info(\"{} NOTICE: {}\", pair, notice) if", "status, results) return None summaries = {} for summary in results[0]: pair =", "(closing values and closing times) for a pair from the Bittrex API. Arguments:", "from the Bittrex API. \"\"\" raise NotImplementedError(\"Tick range not supported by the Bittrex", "<$(echo nqnz.enshfr#tznvy.pbz | tr a-z# n-za-m@)>' __version__ = \"0.2.0\" __all__ = ['Client'] import", "an extract operation. Arguments: ex: Exception returned from :meth:`_extract_items`. data: Dictionary of data", "the request. 
(dict): Dictionary of headers for the request, or None if no", "JSON response' except KeyError: retry_reason = \"missing 'success' value\" retry = True if", "retry: results, ex = await self._extract_items(extract, data) retry, reason = await self._handle_extract_exception(ex, data,", "on any missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries',", "None: reason = await Client._get_extract_failure_reason(ex, data) retry = False else: reason = None", "], 'getMarketSummariesV1', retry_data=True) if status == 200 and results is not None and", "disable=W0123 results.append(expr_func(data)) except (TypeError, IndexError, KeyError, SyntaxError, NameError) as e: ex = e", "results, status = await self.call_extract([ \"['result']['Available']\", ], 'getBalance', params=params, log=True, retry_data=True) if status", "times) for a pair from the Bittrex API. \"\"\" raise NotImplementedError(\"Tick range not", "for marketSummariesV1.\", verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data'] results, status = await self.call_extract([ \"['result']\", \"['result'][0]['Last']\",", "results[0] is None: self.log.error(\"Failed executing sell order request: params {}, status {}, results", "results) return None for tick in results[0]: close_datetime = datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] =", "and calculates any needed HMAC signature to be passed in headers. 
Arguments: method:", "tuple containing: float: The current close price, or None if an error occurred.", "summaries async def get_ticks(self, pair: str, length: int=None) -> List[Dict[str, Any]]: \"\"\" Get", "self.log.error(\"Failed getting market summaries: status {}, results {}.\", status, results) return None summaries", "from its JSON response. Implements retry and exponential backoff for invalid data items.", "\"\"\" if isinstance(ex, (TypeError, IndexError, KeyError)): reason = await Client._get_extract_failure_reason(ex, data) if retry_data", "results' return \"{} ({}: {})\".format(api_message, type(ex).__name__, ex) async def get_market_summaries(self) -> List[Dict[str, Any]]:", "asyncio.Lock() \"\"\" Lock used for syncing access to API data. \"\"\" self.cache =", "or and unretryable error occurred. (str): Sentence fragment or formatted traceback describing the", "d' + item expr_func = eval(expr) # pylint: disable=W0123 results.append(expr_func(data)) except (TypeError, IndexError,", "as a result of the extraction attempt. \"\"\" if 'message' in data and", "log=True, retry_data=True) if status != 200 or results is None or results[0] is", "from the API. log: If True, will log the API JSON response. This", "is not None: market_summaries = {} for result in results[0]: market_summaries[result['MarketName']] = result", "headers for a given API method and parameter list. Forms the full URL", "= await self.call_json(method, params) if status != 200 or data is None: self.log.error(\"Failed", "@staticmethod async def _extract_items(extract: Sequence[str], data: Dict[str, Any]): \"\"\" Extract items from a", "status {}, results {}.\", status, results) if 'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time'] = time.time()", "retry = False attempt = 0 while attempt <= config['api_max_retries']: data, status =", "and retry on empty or missing data items. 
Syntax errors in extract paths", "\"\"\" raise NotImplementedError(\"Tick range not supported by the Bittrex API.\") async def get_last_values(self,", "self.call_json(method, params) if status != 200 or data is None: self.log.error(\"Failed on API", "None if a syntax or or extraction error occurred. Exception: The last exception", "await Client._get_extract_failure_reason(ex, data) if retry_data and data['success']: retry = True else: retry =", "str, params: Sequence[Any]=None, retry_data=False, retry_fail=False, log=False): \"\"\" Call a Bittrex API method and", "occurred. Exception: The last exception that occurred during extraction, or None if no", "for higher-level API error conditions on a 200 response, specifically empty response body,", "import hmac import json import time import asyncio import hashlib import traceback from", "data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name of the API method", "\"\"\" Get v1 market summaries from the API, cached for the current tick", "non-retryable status {}.', status) data = await response.text() break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError)", "({}: {})\".format(api_message, type(ex).__name__, ex) async def get_market_summaries(self) -> List[Dict[str, Any]]: \"\"\" Get the", "empty or missing data items. Syntax errors in extract paths will not be", "retry on empty or missing data items. Syntax errors in extract paths will", "} \"\"\" Response cache. 
\"\"\" self.tick_interval_str: str \"\"\" String representation of the configured", "return None balance = results[0] self.cache['balance'][base] = { 'time': time.time(), 'data': balance }", "else: api_message = 'empty or missing results' return \"{} ({}: {})\".format(api_message, type(ex).__name__, ex)", "None url, headers = await self._get_request_data(method, params) while attempt < config['http_max_retries']: try: async", "market summaries: status {}, results {}.\", status, results) return None summaries = {}", "reason = await Client._get_extract_failure_reason(ex, data) retry = False else: reason = None retry", "of data to extract items from. Returns: (tuple): A tuple containing: list: Result", "normal 200 response, a tuple containing the values for each extracted item. Any", "0 status = 0 data = None while attempt < config['http_max_retries']: raw_data, status", "'getMarketSummariesV1', retry_data=True) if status == 200 and results is not None and results[0]", "call(self, method: str, params: Sequence[Any]=None): \"\"\" Call a Bittrex API method. Implements retry", "strings representing the dictionary paths of the response data items to extract, eg.", "prev_day, 'last': last, } return summaries async def get_ticks(self, pair: str, length: int=None)", "or None if an error occurred. \"\"\" market_summaries = await self._get_market_summaries_v1() if market_summaries", "not None: market_summaries = {} for result in results[0]: market_summaries[result['MarketName']] = result else:", "0 indicates a connection or transport failure. \"\"\" retry = False attempt =", "extracted after exhausting all retries, or had syntax errors in extract paths will", "results, status = await self.call_extract([ \"['result']['uuid']\", ], 'sellLimit', params=params, log=True, retry_data=True) if status", "present). Arguments: data: Dict of the parsed API response. 
ex: Exception thrown as", "{}\".format(config['tick_interval_secs'])) async def call(self, method: str, params: Sequence[Any]=None): \"\"\" Call a Bittrex API", "return (retry, reason) @staticmethod async def _get_extract_failure_reason(ex: Exception, data: Dict[str, Any]): \"\"\" Get", "{} for result in results[0]: market_summaries[result['MarketName']] = result else: self.log.error(\"Failed getting v1 market", "\"\"\" Global configuration. \"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS = {", "n-za-m@)>' __version__ = \"0.2.0\" __all__ = ['Client'] import hmac import json import time", "to the method. retry_data: If True, will perform backoff and retry on empty", "datetime import datetime, timezone from typing import Any, Dict, List, Sequence, Tuple import", "method '{}({})' response:\\n{}\", method, params, json.dumps(data, indent=2)) if not data['success'] and retry_fail: retry", "Bittrex API method. Implements retry and exponentional backoff for HTTP level error conditions.", "['Client'] import hmac import json import time import asyncio import hashlib import traceback", "retried. retry_fail: If True, will perform backoff and retry on explicit failure response", "error occurred. 
Exception: The last exception that occurred during extraction, or None if", "notice = summary['Market']['Notice'] last = summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay'] if not prev_day: prev_day", "results {}.\", params, status, results) return None return results[0] async def get_order(self, pair:", "self.log.error(\"Failed getting balance: params {}, status {}, results {}.\", params, status, results) return", "data and data['message'] and data['message'] != '': api_message = data['message'] else: api_message =", "retry = False attempt = 0 status = 0 data = None url,", "list: Result of each extracted path, or None if a syntax or or", "results[0] self.cache['balance'][base] = { 'time': time.time(), 'data': balance } return balance async def", "api import utils import common import configuration import aiohttp config = configuration.config \"\"\"", "Global configuration. \"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS = { 'getMarketSummaries':", "'path': 'v1.1/market/cancel', 'params': 'uuid={}', 'auth': True }, 'getOrder': { 'path': 'v1.1/account/getorder', 'params': 'uuid={}',", "getting market summaries: status {}, results {}.\", status, results) return None summaries =", "== 200 and results is not None and results[0] is not None: market_summaries", "attempt. \"\"\" if 'message' in data and data['message'] and data['message'] != '': api_message", "= '{}: {}'.format(type(e).__name__, e) retry = True if retry: attempt += 1 await", "[pair, quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'buyLimit', params=params, log=True)", "is kept current (unlike v2). \"\"\" await self.lock.acquire() if 'marketSummariesV1' in self.cache: if", "from the API, cached for the current tick interval. Converts the response list", "reason) retry = False else: break if reason is not None: self.log.error(\"Giving up", "for faster lookups. 
This data is used for batching tick updates, since the", "response, specifically empty response body, malformed response body (invalid JSON), or missing 'success'", "Any]]: \"\"\" Get ticks (closing values and closing times) for a pair from", "extract: A list of strings representing the dictionary paths of the response data", "value of 0 indicates a connection or transport failure. Raises: SyntaxError, NameError: If", "or results[0] is None: self.log.error(\"Failed getting balance: params {}, status {}, results {}.\",", "retry and exponentional backoff for HTTP level error conditions. Arguments: method: Name of", "balance: params {}, status {}, results {}.\", params, status, results) return None balance", "self.log.error(\"Failed executing sell order request: params {}, status {}, results {}.\", params, status,", "Any]): \"\"\" Get the failure reason from the given extraction exception and API", "buy order request: params {}, status {}, results {}.\", params, status, results) return", "API method '{}({})': status {}, data {}\", method, params, status, data) return (data,", "items from. Returns: (tuple): A tuple containing: list: Result of each extracted path,", "status {}, results {}.\", status, results) return None summaries = {} for summary", "list to a dict for faster lookups. This data is used for batching", "parameters to pass to the method. Returns: (tuple): A tuple containing: data (object):", "params, status, results) return None balance = results[0] self.cache['balance'][base] = { 'time': time.time(),", "None results = [] for item in extract: try: expr = 'lambda d:", "status, results) return None return results[0] async def sell_limit(self, pair: str, quantity: float,", "closing times) for a pair from the Bittrex API. 
Arguments: pair: The currency", "results, status = await self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder',", "occurred during extraction, or None if no exception occurred. \"\"\" ex = None", "bool): \"\"\" Handle any exception produced from an extract operation. Arguments: ex: Exception", "\"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder', params=params, log=True, retry_data=True) if status != 200 or results", "\"\"\" Bittrex API module. \"\"\" __author__ = '<NAME> <$(echo nqnz.enshfr#tznvy.pbz | tr a-z#", "return summaries async def get_ticks(self, pair: str, length: int=None) -> List[Dict[str, Any]]: \"\"\"", "dictionary of data. Arguments: extract: List of strings representing the dictionary paths of", "session \"\"\" Object HTTP client session. \"\"\" self.log = utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object", "results) return None balance = results[0] self.cache['balance'][base] = { 'time': time.time(), 'data': balance", "Caution must be taken to ensure that the specified extract dict keys are", "self.log.error('Got non-retryable status {}.', status) data = await response.text() break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError,", "None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def buy_limit(self, pair: str, quantity: float, value: float):", "float): \"\"\" \"\"\" params = [pair, quantity, value] results, status = await self.call_extract([", "try: async with self.session.get(url, headers=headers) as response: status = response.status if status >=", "None or results[0] is None: self.log.error(\"Failed getting ticks: params {}, status {}, results", "log the API JSON response. This is optional as some responses can be", "extract paths will be set to None. 
On a non-200 response, the raw", "return results[0] async def get_order(self, pair: str, order_id: str): \"\"\" \"\"\" params =", "status = await self.call_extract([ \"['result']['uuid']\", ], 'buyLimit', params=params, log=True) if status != 200", "def call(self, method: str, params: Sequence[Any]=None): \"\"\" Call a Bittrex API method. Implements", "self.cache: if time.time() - self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning cached data for marketSummariesV1.\", verbosity=1)", "False return (data, status) @staticmethod async def _get_request_data(method: str, params: Sequence[Any]=None): \"\"\" Get", "try: reason = data['message'] if data['message'] != '' else \"success == false (blank", "{}'.format(type(e).__name__, e) retry = True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex", "response. Implements retry and exponential backoff for higher-level API error conditions on a", "results, status = await self.call_extract([ \"['success']\" ], 'cancelOrder', params=params, log=True, retry_data=True) if status", ">= 200 and status <= 399: data = await response.text() break if (status", "to avoid repeating of non-idempotent operations (such as buying or selling) so should", "{}, results {}.\", status, results) return None summaries = {} for summary in", "status) data = await response.text() break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as e: retry_reason", "occurred. float: The current 24 hour volume, or None if an error occurred.", "[] for item in extract: try: expr = 'lambda d: d' + item", "'buyLimit', params=params, log=True) if status != 200 or results is None or results[0]", "Extract items from a dictionary of data. Arguments: extract: List of strings representing", "list. 
Forms the full URL with query string and calculates any needed HMAC", "query = 'apikey={}&nonce={}&'.format(api_key, nonce) + query url = API_URL.format(API_METHODS[method]['path'], query) signature = hmac.new(api_secret.encode(),", "status = response.status if status >= 200 and status <= 399: data =", "response body (may be None). On a 200 response with a missing response", "method and extract data items from its JSON response. Implements retry and exponential", "None). status (int): The HTTP response status code. A value of 0 indicates", "nqnz.enshfr#tznvy.pbz | tr a-z# n-za-m@)>' __version__ = \"0.2.0\" __all__ = ['Client'] import hmac", "cache. \"\"\" self.tick_interval_str: str \"\"\" String representation of the configured tick interval. \"\"\"", "the method. Returns: (tuple): A tuple containing: data (str): The raw HTTP response", "[order_id] results, status = await self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ],", "Returns: (tuple): A tuple containing: data (object): On a normal 200 response, a", "representation of the configured tick interval. \"\"\" if config['tick_interval_secs'] == 60: self.tick_interval_str =", "Implements retry and exponential backoff for higher-level API error conditions on a 200", "pair: Currency pair name eg. 'BTC-ETH' Returns: (tuple): A tuple containing: float: The", "default) first. Arguments: extract: A list of strings representing the dictionary paths of", "On a non-200 response, the raw response body (may be None). 
On a", "is None: self.log.error(\"Failed getting ticks: params {}, status {}, results {}.\", params, status,", "'params': '', 'auth': False }, 'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries', 'params': '', 'auth': False", "headers=headers) as response: status = response.status if status >= 200 and status <=", "retry_reason = 'invalid JSON response' except KeyError: retry_reason = \"missing 'success' value\" retry", "aiohttp config = configuration.config \"\"\" Global configuration. \"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL =", "retry: try: data = json.loads(raw_data) _ = data['success'] return (data, status) except json.JSONDecodeError:", "status) @staticmethod async def _extract_items(extract: Sequence[str], data: Dict[str, Any]): \"\"\" Extract items from", "'invalid JSON response' except KeyError: retry_reason = \"missing 'success' value\" retry = True", "if isinstance(ex, (TypeError, IndexError, KeyError)): reason = await Client._get_extract_failure_reason(ex, data) if retry_data and", "end_time: float) -> List[Dict[str, Any]]: \"\"\" Get a range of ticks (closing values", "str, params: list=None): \"\"\" Call a Bittrex API method and parse JSON response.", "for the current tick interval. Converts the response list to a dict for", "close_datetime = datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async def get_tick_range(self, pair:", "parameters to pass to the method. retry_data: If True, will perform backoff and", "200 or results is None or results[0] is None: self.log.error(\"Failed executing sell order", "error occurred or no ticks are available. \"\"\" params = [pair, self.tick_interval_str] results,", "None. status (int): The HTTP response status code. 
A value of 0 indicates", "SyntaxError, NameError: If one or more of the passed extract dict paths contains", "ex) @staticmethod async def _handle_extract_exception(ex: Exception, data: Dict[str, Any], retry_data: bool): \"\"\" Handle", "TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS = { 'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries',", "\"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True, retry_fail=True) if status", "pair: The currency pair eg. 'BTC-ETH'. length: Not supported by the API, will", "common.backoff(attempt, \"Bittrex call_extract {}\".format(method), reason) retry = False else: break if reason is", "= None results = [] for item in extract: try: expr = 'lambda", "ex) async def get_market_summaries(self) -> List[Dict[str, Any]]: \"\"\" Get the market summaries from", "return (raw_data, status) if raw_data is None: retry_reason = \"'None' on successful response\"", "exhausting all retries, or had syntax errors in extract paths will be set", "data['success'] and retry_fail: retry = True try: reason = data['message'] if data['message'] !=", "\"['success']\" ], 'cancelOrder', params=params, log=True, retry_data=True) if status != 200 or results is", "or results is None or results[0] is None: self.log.error(\"Failed executing buy order request:", "paths will be set to None. On a non-200 response, the raw response", "if no exception occurred. 
\"\"\" ex = None results = [] for item", "= eval(expr) # pylint: disable=W0123 results.append(expr_func(data)) except (TypeError, IndexError, KeyError, SyntaxError, NameError) as", "\"'None' on successful response\" retry = True if not retry: try: data =", "results[0]: market_summaries[result['MarketName']] = result else: self.log.error(\"Failed getting v1 market summaries: status {}, results", "params = [order_id] results, status = await self.call_extract([ \"['success']\" ], 'cancelOrder', params=params, log=True,", "dict keys are correct to avoid repeating of non-idempotent operations (such as buying", "typing import Any, Dict, List, Sequence, Tuple import api import utils import common", "to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary of data to extract items from.", "\"\"\" if 'message' in data and data['message'] and data['message'] != '': api_message =", "pair = summary['Summary']['MarketName'] active = summary['Market']['IsActive'] notice = summary['Market']['Notice'] last = summary['Summary']['Last'] prev_day", "'auth': False }, 'getTicker': { 'path': 'v1.1/public/getticker', 'params': 'market={}', 'auth': False }, 'buyLimit':", "parameter list. Forms the full URL with query string and calculates any needed", "< config['http_max_retries']: raw_data, status = await self.call(method, params) if status != 200: return", "needed HMAC signature to be passed in headers. Arguments: method: Name of the", "{'apisign': signature} else: url = API_URL.format(API_METHODS[method]['path'], query) headers = None return (url, headers)", "Get a range of ticks (closing values and closing times) for a pair", "'time': time.time(), 'data': balance } return balance async def _get_market_summaries_v1(self): \"\"\" Get v1", "import aiohttp config = configuration.config \"\"\" Global configuration. 
\"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL", "'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth': False", "is None: self.log.error(\"Failed on API method '{}({})': status {}, data {}\", method, params,", "is None: self.log.error(\"Failed executing buy order request: params {}, status {}, results {}.\",", "extract paths will not be retried. retry_fail: If True, will perform backoff and", "\"['result'][0]['Market']['BaseCurrency']\", # To retry on any missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\",", "'' else \"success == false (blank message)\" except KeyError: reason = \"success ==", "0 data = None url, headers = await self._get_request_data(method, params) while attempt <", "data['message'] else: api_message = 'empty or missing results' return \"{} ({}: {})\".format(api_message, type(ex).__name__,", "{}.\", params, status, results) return None return results[0] async def get_order(self, pair: str,", "import configuration import aiohttp config = configuration.config \"\"\" Global configuration. \"\"\" TIME_FORMAT =", "order_id: str): \"\"\" \"\"\" params = [order_id] results, status = await self.call_extract([ \"['success']\"", "], 'getBalance', params=params, log=True, retry_data=True) if status != 200 or results is None", "return { 'open': results[1], 'quantity': results[2], 'remaining': results[3], 'value': results[4], 'fees': results[5], }", "= False attempt = 0 status = 0 data = None while attempt", "or (status in [0, 408, 429]): retry_reason = 'status {}'.format(status) retry = True", "data. 
Arguments: extract: List of strings representing the dictionary paths of the response", "\"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True, retry_fail=True) if status != 200 or results is None", "\"{} ({}: {})\".format(api_message, type(ex).__name__, ex) async def get_market_summaries(self) -> List[Dict[str, Any]]: \"\"\" Get", "} class Client(api.Client): \"\"\" Client for interacting with the Bittrex API. \"\"\" def", "a tuple containing the values for each extracted item. Any items that failed", "v2). \"\"\" await self.lock.acquire() if 'marketSummariesV1' in self.cache: if time.time() - self.cache['marketSummariesV1']['time'] <", "}, 'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries', 'params': '', 'auth': False }, 'getTicks': { 'path':", "e) retry = True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call", "None: self.log.error(\"Giving up on: {}\", reason) return (tuple(results), status) @staticmethod async def _extract_items(extract:", "'fees': results[5], } async def cancel_order(self, pair: str, order_id: str): \"\"\" \"\"\" params", "params: Values of query parameters to pass to the method. Returns: (tuple): A", "market summaries from the API, cached for the current tick interval. Converts the", "{})\".format(api_message, type(ex).__name__, ex) async def get_market_summaries(self) -> List[Dict[str, Any]]: \"\"\" Get the market", "= await self.call(method, params) if status != 200: return (raw_data, status) if raw_data", "or None if no issue occurred. 
\"\"\" if isinstance(ex, (TypeError, IndexError, KeyError)): reason", "import utils import common import configuration import aiohttp config = configuration.config \"\"\" Global", "tuple containing: list: Result of each extracted path, or None if a syntax", "retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_extract {}\".format(method), reason) retry = False", "Get the failure reason from the given extraction exception and API response message", "d: d' + item expr_func = eval(expr) # pylint: disable=W0123 results.append(expr_func(data)) except (TypeError,", "self.log.error(\"Failed getting order: params{}, status {}, results {}.\", params, status, results) return None", "False else: reason = None retry = False return (retry, reason) @staticmethod async", "A tuple containing: float: The current close price, or None if an error", "the response list to a dict for faster lookups. This data is used", "had syntax errors in extract paths will be set to None. On a", "element exists \"['result'][0]['T']\" ], 'getTicks', params=params, retry_data=True, retry_fail=True) if status != 200 or", "or missing results' return \"{} ({}: {})\".format(api_message, type(ex).__name__, ex) async def get_market_summaries(self) ->", "200 or results is None or results[0] is None: self.log.error(\"Failed getting ticks: params", "of non-idempotent operations (such as buying or selling) so should always be tested", "a non-200 response, the raw response body (may be None). On a response", "def call_extract(self, extract: Sequence[str], method: str, params: Sequence[Any]=None, retry_data=False, retry_fail=False, log=False): \"\"\" Call", "coding: utf-8 -*- \"\"\" Bittrex API module. 
\"\"\" __author__ = '<NAME> <$(echo nqnz.enshfr#tznvy.pbz", "ticks: params {}, status {}, results {}.\", params, status, results) return None for", "= summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay'] if not prev_day: prev_day = last if notice:", "self._handle_extract_exception(ex, data, retry_data) if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_extract {}\".format(method),", "# For retry of missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True) if status", "== false (blank message)\" except KeyError: reason = \"success == false (missing message)\"", "are required. \"\"\" query = API_METHODS[method]['params'].format(*params or []) if API_METHODS[method]['auth']: nonce = int(time.time()", "its JSON response. Implements retry and exponential backoff for invalid data items. Caution", "Values of query parameters to pass to the method. Returns: (tuple): A tuple", "!= '' else \"success == false (blank message)\" except KeyError: reason = \"success", "0 indicates a connection or transport failure. 
Raises: SyntaxError, NameError: If one or", "time import asyncio import hashlib import traceback from datetime import datetime, timezone from", "= json.loads(raw_data) _ = data['success'] return (data, status) except json.JSONDecodeError: retry_reason = 'invalid", "if market_summaries is None: return None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def buy_limit(self, pair:", "\"0.2.0\" __all__ = ['Client'] import hmac import json import time import asyncio import", "await self._get_request_data(method, params) while attempt < config['http_max_retries']: try: async with self.session.get(url, headers=headers) as", "Sentence fragment or formatted traceback describing the reason for retry or error, or", "self.tick_interval_str] results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['C']\", # To retry if not", "is None: self.log.error(\"Failed executing cancel order request: params {} status {}, results {}.\",", "should always be tested with retry=False (the default) first. Arguments: extract: A list", "given extraction exception and API response message (if present). Arguments: data: Dict of", "Client for interacting with the Bittrex API. \"\"\" def __init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()):", "[]) if API_METHODS[method]['auth']: nonce = int(time.time() * 1000) api_key = config['bittrex_api_key'] api_secret =", "200 and status <= 399: data = await response.text() break if (status >=", "always be tested with retry=False (the default) first. Arguments: extract: A list of", "on: {}\", reason) return (tuple(results), status) @staticmethod async def _extract_items(extract: Sequence[str], data: Dict[str,", "if not retry: results, ex = await self._extract_items(extract, data) retry, reason = await", "paths of the response data items to extract, eg. 
[\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name", "'https://bittrex.com/api/{}?{}' API_METHODS = { 'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth': False },", "{ 'time': time.time(), 'data': balance } return balance async def _get_market_summaries_v1(self): \"\"\" Get", "'status {}'.format(status) retry = True else: self.log.error('Got non-retryable status {}.', status) data =", "config['tick_interval_secs']: self.log.debug(\"Returning cached data for marketSummariesV1.\", verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data'] results, status =", "to be passed in headers. Arguments: method: Name of the API method to", "retry and exponential backoff for invalid data items. Caution must be taken to", "should be retried, false otherwise. Returns: (tuple): A tuple containing: (bool): True if", "close price, or None if an error occurred. float: The current 24 hour", "always return all ticks. Returns: A list of the raw tick data from", "if missing data should be retried, false otherwise. Returns: (tuple): A tuple containing:", "and status <= 599 and status != 504) or (status in [0, 408,", "results {}.\", status, results) return None summaries = {} for summary in results[0]:", "= await self._get_request_data(method, params) while attempt < config['http_max_retries']: try: async with self.session.get(url, headers=headers)", "False return (retry, reason) @staticmethod async def _get_extract_failure_reason(ex: Exception, data: Dict[str, Any]): \"\"\"", "be delisted' in notice or 'scheduled for delisting' in notice: self.log.info(\"{} marked as", "\"\"\" \"\"\" params = [order_id] results, status = await self.call_extract([ \"['success']\" ], 'cancelOrder',", "or results[0] is None: self.log.error(\"Failed executing buy order request: params {}, status {},", "status {}, data {}\", method, params, status, data) return (data, status) if log:", "API JSON response. 
This is optional as some responses can be quite large.", "self.log.info(\"{} marked as inactive due to pending removal.\", pair) active = False summaries[pair]", "self.cache['marketSummariesV1']['data'] results, status = await self.call_extract([ \"['result']\", \"['result'][0]['Last']\", # For retry of missing", "method: str, params: Sequence[Any]=None): \"\"\" Call a Bittrex API method. Implements retry and", "if data['message'] != '' else \"success == false (blank message)\" except KeyError: reason", "The last exception that occurred during extraction, or None if no exception occurred.", "True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call {}\".format(method), retry_reason) retry", "\"\"\" \"\"\" params = [base] results, status = await self.call_extract([ \"['result']['Available']\", ], 'getBalance',", "market_summaries[result['MarketName']] = result else: self.log.error(\"Failed getting v1 market summaries: status {}, results {}.\",", "missing data should be retried, false otherwise. Returns: (tuple): A tuple containing: (bool):", "'v1.1/public/getticker', 'params': 'market={}', 'auth': False }, 'buyLimit': { 'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth':", "for interacting with the Bittrex API. 
\"\"\" def __init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session", "API_METHODS = { 'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth': False }, 'getMarketSummariesV1':", "async def get_market_summaries(self) -> List[Dict[str, Any]]: \"\"\" Get the market summaries from the", "params, status, results) return None return results[0] async def sell_limit(self, pair: str, quantity:", "ensure that the specified extract dict keys are correct to avoid repeating of", "in data and data['message'] and data['message'] != '': api_message = data['message'] else: api_message", "{ 'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getTicker': { 'path': 'v1.1/public/getticker', 'params':", "= 'oneMin' elif config['tick_interval_secs'] == 300: self.tick_interval_str = 'fiveMin' else: raise ValueError(\"Unsupported tick", "'value': results[4], 'fees': results[5], } async def cancel_order(self, pair: str, order_id: str): \"\"\"", "request: params {}, status {}, results {}.\", params, status, results) return None return", "False attempt = 0 status = 0 data = None url, headers =", "= await response.text() break if (status >= 500 and status <= 599 and", "item in extract: try: expr = 'lambda d: d' + item expr_func =", "= API_URL.format(API_METHODS[method]['path'], query) signature = hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers = {'apisign': signature} else:", "= [pair, quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'buyLimit', params=params,", "as buying or selling) so should always be tested with retry=False (the default)", "call_json {}\".format(method), retry_reason) retry = False return (data, status) async def call_extract(self, extract:", "if API_METHODS[method]['auth']: nonce = int(time.time() * 1000) api_key = config['bittrex_api_key'] api_secret = 
config['bittrex_api_secret']", "results[0] async def get_tick_range(self, pair: str, start_time: float, end_time: float) -> List[Dict[str, Any]]:", "getting ticks: params {}, status {}, results {}.\", params, status, results) return None", "}, 'getTicker': { 'path': 'v1.1/public/getticker', 'params': 'market={}', 'auth': False }, 'buyLimit': { 'path':", "'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last': last, } return summaries async def", "tuple containing: data (object): On a normal 200 response, a tuple containing the", "= \"missing 'success' value\" retry = True if retry: attempt += 1 await", "data items from its JSON response. Implements retry and exponential backoff for invalid", "status) async def call_extract(self, extract: Sequence[str], method: str, params: Sequence[Any]=None, retry_data=False, retry_fail=False, log=False):", "ex: Exception thrown as a result of the extraction attempt. \"\"\" if 'message'", "await self.call_extract([ \"['result']['Available']\", ], 'getBalance', params=params, log=True, retry_data=True) if status != 200 or", "{ 'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth': False }, 'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries', 'params':", "= '%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS = { 'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries', 'params':", "Values of query parameters to pass to the method. retry_data: If True, will", "Returns: (tuple): A tuple containing: list: Result of each extracted path, or None", "HTTP response status code. A value of 0 indicates a connection or transport", "Any]]: \"\"\" Get the market summaries from the Bittrex API. Returns: The market", "str, order_id: str): \"\"\" \"\"\" params = [order_id] results, status = await self.call_extract([", "method, params, json.dumps(data, indent=2)) if not data['success'] and retry_fail: retry = True try:", "items. 
Syntax errors in extract paths will not be retried. retry_fail: If True,", "True else: retry = False elif isinstance(ex, (SyntaxError, NameError)): reason = \"{}: {}\\n{}\".format(type(ex).__name__,", "results[4], 'fees': results[5], } async def cancel_order(self, pair: str, order_id: str): \"\"\" \"\"\"", "status {}, results {}.\", params, status, results) return None return { 'open': results[1],", "of the passed extract dict paths contains invalid syntax. \"\"\" retry = False", "24-hour volume for a currency pair from the API. Arguments: pair: Currency pair", "status, results) return None return { 'open': results[1], 'quantity': results[2], 'remaining': results[3], 'value':", "A tuple containing: (str): Full URL for the request. (dict): Dictionary of headers", "if not retry: try: data = json.loads(raw_data) _ = data['success'] return (data, status)", "Implements retry and exponential backoff for invalid data items. Caution must be taken", "\"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True, retry_fail=True) if status != 200 or", "if 'will be removed' in notice or 'will be delisted' in notice or", "log=True) if status != 200 or results is None or results[0] is None:", "status) @staticmethod async def _get_request_data(method: str, params: Sequence[Any]=None): \"\"\" Get the request URL", "e: ex = e results.append(None) return (results, ex) @staticmethod async def _handle_extract_exception(ex: Exception,", "in headers. Arguments: method: Name of the API method to call. params: Values", "for HTTP level error conditions. 
Arguments: method: Name of the API method to", "False }, 'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getTicker': {", "'quantity': results[2], 'remaining': results[3], 'value': results[4], 'fees': results[5], } async def cancel_order(self, pair:", "None return results[0] async def get_balance(self, base: str): \"\"\" \"\"\" params = [base]", "(object): On success, a dict containing the parsed JSON response. On a non-200", "24 hour volume, or None if an error occurred. \"\"\" market_summaries = await", "tuple containing: (bool): True if the exception warrants a retry, False if no", "Arguments: ex: Exception returned from :meth:`_extract_items`. data: Dictionary of data passed to :meth:`_extract_items`.", "the values for each extracted item. Any items that failed to be extracted", "float: The current 24 hour volume, or None if an error occurred. \"\"\"", "API_METHODS[method]['auth']: nonce = int(time.time() * 1000) api_key = config['bittrex_api_key'] api_secret = config['bittrex_api_secret'] query", "notice or 'scheduled for delisting' in notice: self.log.info(\"{} marked as inactive due to", "of the raw tick data from the API, or None if an error", "extract: List of strings representing the dictionary paths of the response data items", "= \"0.2.0\" __all__ = ['Client'] import hmac import json import time import asyncio", "retry_data=True, retry_fail=True) if status != 200 or results is None or results[0] is", "The HTTP response status code. 
A value of 0 indicates a connection or", "(data, status) @staticmethod async def _get_request_data(method: str, params: Sequence[Any]=None): \"\"\" Get the request", "url = API_URL.format(API_METHODS[method]['path'], query) headers = None return (url, headers) async def call_json(self,", "async def get_last_values(self, pair: str) -> Tuple[float, float]: \"\"\" Get the last price", "config['api_max_retries']: data, status = await self.call_json(method, params) if status != 200 or data", "be None). On a 200 response with a missing response body, None. status", "tuple containing: (str): Full URL for the request. (dict): Dictionary of headers for", "{}, results {}.\", params, status, results) return None balance = results[0] self.cache['balance'][base] =", "data: Dict of the parsed API response. ex: Exception thrown as a result", "= \"success == false (missing message)\" if not retry: results, ex = await", "close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async def get_tick_range(self, pair: str, start_time: float, end_time: float) ->", "or missing 'success' value. Arguments: method: Name of the API method to call.", "to call. params: Values of query parameters to pass to the method. Returns:", "'params': 'currency={}', 'auth': True }, } class Client(api.Client): \"\"\" Client for interacting with", "def get_balance(self, base: str): \"\"\" \"\"\" params = [base] results, status = await", "<reponame>arafuse/CryptoWatcher<gh_stars>1-10 # -*- coding: utf-8 -*- \"\"\" Bittrex API module. \"\"\" __author__ =", "extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary of data to extract items from. Returns:", "= False summaries[pair] = { 'active': active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0,", "Bittrex API method and extract data items from its JSON response. 
Implements retry", "response body, malformed response body (invalid JSON), or missing 'success' value. Arguments: method:", "the parsed JSON response. On a non-200 response, the raw response body (may", "params {}, status {}, results {}.\", params, status, results) return None return results[0]", "Get the last price and 24-hour volume for a currency pair from the", "= await self._handle_extract_exception(ex, data, retry_data) if retry: attempt += 1 await common.backoff(attempt, \"Bittrex", "status, data) return (data, status) if log: self.log.debug(\"API method '{}({})' response:\\n{}\", method, params,", "in extract paths will be set to None. On a non-200 response, the", "0 while attempt <= config['api_max_retries']: data, status = await self.call_json(method, params) if status", "and status != 504) or (status in [0, 408, 429]): retry_reason = 'status", "List[Dict[str, Any]]: \"\"\" Get the market summaries from the Bittrex API. Returns: The", "missing 'success' value. Arguments: method: Name of the API method to call. params:", "ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs'])) async def call(self, method: str, params: Sequence[Any]=None): \"\"\" Call", "\"\"\" results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To retry on any", "status != 200 or results is None or results[0] is None: self.log.error(\"Failed getting", "to pending removal.\", pair) active = False summaries[pair] = { 'active': active, 'baseCurrency':", "avoid repeating of non-idempotent operations (such as buying or selling) so should always", "from the API. Arguments: pair: Currency pair name eg. 
'BTC-ETH' Returns: (tuple): A", "results) return None return results[0] async def get_order(self, pair: str, order_id: str): \"\"\"", "A tuple containing: data (object): On a normal 200 response, a tuple containing", "pair: str) -> Tuple[float, float]: \"\"\" Get the last price and 24-hour volume", "} async def cancel_order(self, pair: str, order_id: str): \"\"\" \"\"\" params = [order_id]", "None). On a response with a missing response body, None. status (int): The", "notice: self.log.info(\"{} marked as inactive due to pending removal.\", pair) active = False", "= configuration.config \"\"\" Global configuration. \"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS", "if an error occurred. float: The current 24 hour volume, or None if", "response, a tuple containing the values for each extracted item. Any items that", "marketSummariesV1.\", verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data'] results, status = await self.call_extract([ \"['result']\", \"['result'][0]['Last']\", #", "not retry: results, ex = await self._extract_items(extract, data) retry, reason = await self._handle_extract_exception(ex,", "pair from the Bittrex API. Arguments: pair: The currency pair eg. 'BTC-ETH'. length:", "of the response data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary of", "method. retry_data: If True, will perform backoff and retry on empty or missing", "summaries = {} for summary in results[0]: pair = summary['Summary']['MarketName'] active = summary['Market']['IsActive']", "the Bittrex API. \"\"\" def __init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session = session \"\"\"", "API, cached for the current tick interval. Converts the response list to a", "to pass to the method. Returns: (tuple): A tuple containing: (str): Full URL", "during extraction, or None if no exception occurred. 
\"\"\" ex = None results", "retry_data and data['success']: retry = True else: retry = False elif isinstance(ex, (SyntaxError,", "= 'https://bittrex.com/api/{}?{}' API_METHODS = { 'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth': False", "order request: params {} status {}, results {}.\", params, status, results) return None", "occurred. \"\"\" if isinstance(ex, (TypeError, IndexError, KeyError)): reason = await Client._get_extract_failure_reason(ex, data) if", "a connection or transport failure. \"\"\" retry = False attempt = 0 status", "sell order request: params {}, status {}, results {}.\", params, status, results) return", "'path': 'v1.1/public/getticker', 'params': 'market={}', 'auth': False }, 'buyLimit': { 'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}',", "nonce) + query url = API_URL.format(API_METHODS[method]['path'], query) signature = hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers", "cached for the current tick interval. Converts the response list to a dict", "results is None or not results[0]: self.log.error(\"Failed getting order: params{}, status {}, results", "quantity: float, value: float): \"\"\" \"\"\" params = [pair, quantity, value] results, status", "code. A value of 0 indicates a connection or transport failure. Raises: SyntaxError,", "(tuple(results), status) @staticmethod async def _extract_items(extract: Sequence[str], data: Dict[str, Any]): \"\"\" Extract items", "async def call_json(self, method: str, params: list=None): \"\"\" Call a Bittrex API method", "in results[0]: pair = summary['Summary']['MarketName'] active = summary['Market']['IsActive'] notice = summary['Market']['Notice'] last =", "data passed to :meth:`_extract_items`. retry_data: True if missing data should be retried, false", "aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session = session \"\"\" Object HTTP client session. 
\"\"\" self.log =", "results {}.\", params, status, results) return None return results[0] async def sell_limit(self, pair:", "retry_data: True if missing data should be retried, false otherwise. Returns: (tuple): A", "\"\"\" Handle any exception produced from an extract operation. Arguments: ex: Exception returned", "params: Values of query parameters to pass to the method. retry_data: If True,", "} return balance async def _get_market_summaries_v1(self): \"\"\" Get v1 market summaries from the", "200 response with a missing response body, None. status (int): The HTTP response", "and retry on explicit failure response from the API. log: If True, will", "\"\"\" Get the last price and 24-hour volume for a currency pair from", "tick updates, since the v1 API is kept current (unlike v2). \"\"\" await", "None if no issue occurred. \"\"\" if isinstance(ex, (TypeError, IndexError, KeyError)): reason =", "used for syncing access to API data. \"\"\" self.cache = { 'balance': {}", "data {}\", method, params, status, data) return (data, status) if log: self.log.debug(\"API method", "'market={}&quantity={}&rate={}', 'auth': True }, 'cancelOrder': { 'path': 'v1.1/market/cancel', 'params': 'uuid={}', 'auth': True },", "will perform backoff and retry on empty or missing data items. Syntax errors", "= True else: self.log.error('Got non-retryable status {}.', status) data = await response.text() break", "results[0]: pair = summary['Summary']['MarketName'] active = summary['Market']['IsActive'] notice = summary['Market']['Notice'] last = summary['Summary']['Last']", "Handle any exception produced from an extract operation. Arguments: ex: Exception returned from", "'market={}&quantity={}&rate={}', 'auth': True }, 'sellLimit': { 'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True },", "response, the raw response body (may be None). 
On a 200 response with", "value: float): \"\"\" \"\"\" params = [pair, quantity, value] results, status = await", "retry = False return (retry, reason) @staticmethod async def _get_extract_failure_reason(ex: Exception, data: Dict[str,", "status = await self.call_json(method, params) if status != 200 or data is None:", "to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name of the API method to call.", "async def sell_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\" params =", "'sellLimit', params=params, log=True, retry_data=True) if status != 200 or results is None or", "_get_extract_failure_reason(ex: Exception, data: Dict[str, Any]): \"\"\" Get the failure reason from the given", "def get_ticks(self, pair: str, length: int=None) -> List[Dict[str, Any]]: \"\"\" Get ticks (closing", "results[0] is not None: market_summaries = {} for result in results[0]: market_summaries[result['MarketName']] =", "status = await self.call_extract([ \"['success']\" ], 'cancelOrder', params=params, log=True, retry_data=True) if status !=", "API response. ex: Exception thrown as a result of the extraction attempt. \"\"\"", "error conditions on a 200 response, specifically empty response body, malformed response body", "attempt <= config['api_max_retries']: data, status = await self.call_json(method, params) if status != 200", "tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async def get_tick_range(self, pair: str, start_time: float, end_time:", "List[Dict[str, Any]]: \"\"\" Get a range of ticks (closing values and closing times)", "current 24 hour volume, or None if an error occurred. \"\"\" market_summaries =", "response. This is optional as some responses can be quite large. 
Returns: (tuple):", "True }, } class Client(api.Client): \"\"\" Client for interacting with the Bittrex API.", "response:\\n{}\", method, params, json.dumps(data, indent=2)) if not data['success'] and retry_fail: retry = True", "\"\"\" Object HTTP client session. \"\"\" self.log = utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object logger.", "no headers are required. \"\"\" query = API_METHODS[method]['params'].format(*params or []) if API_METHODS[method]['auth']: nonce", "utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object logger. \"\"\" self.lock = asyncio.Lock() \"\"\" Lock used for", "aiohttp.ClientPayloadError, asyncio.TimeoutError) as e: retry_reason = '{}: {}'.format(type(e).__name__, e) retry = True if", "!= '': api_message = data['message'] else: api_message = 'empty or missing results' return", "non-idempotent operations (such as buying or selling) so should always be tested with", "'marketName={}&tickInterval={}', 'auth': False }, 'getTicker': { 'path': 'v1.1/public/getticker', 'params': 'market={}', 'auth': False },", "'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'cancelOrder': { 'path': 'v1.1/market/cancel', 'params': 'uuid={}', 'auth': True", "= await self._get_market_summaries_v1() if market_summaries is None: return None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async", "session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session = session \"\"\" Object HTTP client session. \"\"\" self.log", "method. 
Returns: (tuple): A tuple containing: data (str): The raw HTTP response body", "be taken to ensure that the specified extract dict keys are correct to", "params = [base] results, status = await self.call_extract([ \"['result']['Available']\", ], 'getBalance', params=params, log=True,", "hmac import json import time import asyncio import hashlib import traceback from datetime", "import traceback from datetime import datetime, timezone from typing import Any, Dict, List,", "retry_data=True) if status == 200 and results is not None and results[0] is", "'v1.1/account/getorder', 'params': 'uuid={}', 'auth': True }, 'getBalance': { 'path': 'v1.1/account/getbalance', 'params': 'currency={}', 'auth':", "A list of strings representing the dictionary paths of the response data items", "summary in results[0]: pair = summary['Summary']['MarketName'] active = summary['Market']['IsActive'] notice = summary['Market']['Notice'] last", "the response data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary of data", "= await self.call_extract([ \"['success']\" ], 'cancelOrder', params=params, log=True, retry_data=True) if status != 200", "parameters to pass to the method. Returns: (tuple): A tuple containing: data (str):", "items from its JSON response. Implements retry and exponential backoff for invalid data", "None or results[0] is None: self.log.error(\"Failed getting balance: params {}, status {}, results", "\"\"\" Response cache. \"\"\" self.tick_interval_str: str \"\"\" String representation of the configured tick", "(object): On a normal 200 response, a tuple containing the values for each", "API. Arguments: pair: The currency pair eg. 'BTC-ETH'. 
length: Not supported by the", "if status != 200 or results is None or not results[0]: self.log.error(\"Failed getting", "describing the reason for retry or error, or None if no issue occurred.", "= True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call {}\".format(method), retry_reason)", "{ 'path': 'v1.1/account/getbalance', 'params': 'currency={}', 'auth': True }, } class Client(api.Client): \"\"\" Client", "(missing message)\" if not retry: results, ex = await self._extract_items(extract, data) retry, reason", "retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_json {}\".format(method), retry_reason) retry = False", "= datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async def get_tick_range(self, pair: str,", "None: self.log.error(\"Failed on API method '{}({})': status {}, data {}\", method, params, status,", "None: self.log.error(\"Failed getting balance: params {}, status {}, results {}.\", params, status, results)", "eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name of the API method to call. params: Values", "the reason for retry or error, or None if no issue occurred. \"\"\"", "\"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS = { 'getMarketSummaries': { 'path':", "last exception that occurred during extraction, or None if no exception occurred. \"\"\"", "be set to None. 
On a non-200 response, the raw response body (may", "and exponential backoff for higher-level API error conditions on a 200 response, specifically", "self.lock.release() return self.cache['marketSummariesV1']['data'] results, status = await self.call_extract([ \"['result']\", \"['result'][0]['Last']\", # For retry", "= await self.call_extract([ \"['result']['Available']\", ], 'getBalance', params=params, log=True, retry_data=True) if status != 200", "taken to ensure that the specified extract dict keys are correct to avoid", "Returns: (tuple): A tuple containing: data (str): The raw HTTP response body (may", "async def buy_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\" params =", "\"\"\" Get ticks (closing values and closing times) for a pair from the", "params=params, log=True) if status != 200 or results is None or results[0] is", "\"\"\" Get the request URL and headers for a given API method and", "for batching tick updates, since the v1 API is kept current (unlike v2).", "+= 1 await common.backoff(attempt, \"Bittrex call_extract {}\".format(method), reason) retry = False else: break", "List of strings representing the dictionary paths of the response data items to", "closing times) for a pair from the Bittrex API. \"\"\" raise NotImplementedError(\"Tick range", "will perform backoff and retry on explicit failure response from the API. log:", "= False elif isinstance(ex, (SyntaxError, NameError)): reason = \"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry", "< config['tick_interval_secs']: self.log.debug(\"Returning cached data for marketSummariesV1.\", verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data'] results, status", "with a missing response body, None. 
status (int): The HTTP response status code.", "'', 'auth': False }, 'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries', 'params': '', 'auth': False },", "Get the market summaries from the Bittrex API. Returns: The market summaries dict.", "retry on any missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ],", "{}, status {}, results {}.\", params, status, results) return None for tick in", "\"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder', params=params, log=True, retry_data=True) if status !=", "\"\"\" def __init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session = session \"\"\" Object HTTP client", "= int(time.time() * 1000) api_key = config['bittrex_api_key'] api_secret = config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key,", "headers) async def call_json(self, method: str, params: list=None): \"\"\" Call a Bittrex API", "{}.\", params, status, results) return None return results[0] async def sell_limit(self, pair: str,", "retry = True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_json {}\".format(method),", "self.call_extract([ \"['result']['Available']\", ], 'getBalance', params=params, log=True, retry_data=True) if status != 200 or results", "repeating of non-idempotent operations (such as buying or selling) so should always be", "full URL with query string and calculates any needed HMAC signature to be", "and results[0] is not None: market_summaries = {} for result in results[0]: market_summaries[result['MarketName']]", "Tuple import api import utils import common import configuration import 
aiohttp config =", "async def _get_extract_failure_reason(ex: Exception, data: Dict[str, Any]): \"\"\" Get the failure reason from", "await self._handle_extract_exception(ex, data, retry_data) if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_extract", "quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'sellLimit', params=params, log=True, retry_data=True)", "\"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True, retry_fail=True) if status != 200 or results", "results {}.\", status, results) if 'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time'] = time.time() self.lock.release() return", "be quite large. Returns: (tuple): A tuple containing: data (object): On a normal", "config['bittrex_api_key'] api_secret = config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key, nonce) + query url = API_URL.format(API_METHODS[method]['path'],", "if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_json {}\".format(method), retry_reason) retry =", "status code. A value of 0 indicates a connection or transport failure. \"\"\"", "if status != 200 or results is None or results[0] is None: self.log.error(\"Failed", "= await self.call_extract([ \"['result']\", \"['result'][0]['C']\", # To retry if not at least one", "data = await response.text() break if (status >= 500 and status <= 599", "Dict, List, Sequence, Tuple import api import utils import common import configuration import", "'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getTicker': { 'path': 'v1.1/public/getticker', 'params': 'market={}', 'auth':", "backoff and retry on explicit failure response from the API. log: If True,", "otherwise. Returns: (tuple): A tuple containing: (bool): True if the exception warrants a", "are available. 
\"\"\" params = [pair, self.tick_interval_str] results, status, = await self.call_extract([ \"['result']\",", "Bittrex API. \"\"\" raise NotImplementedError(\"Tick range not supported by the Bittrex API.\") async", "containing: data (str): The raw HTTP response body (may be None). status (int):", "retry_reason = \"missing 'success' value\" retry = True if retry: attempt += 1", "data['message'] != '' else \"success == false (blank message)\" except KeyError: reason =", "\"['result']['CommissionPaid']\", ], 'getOrder', params=params, log=True, retry_data=True) if status != 200 or results is", "| tr a-z# n-za-m@)>' __version__ = \"0.2.0\" __all__ = ['Client'] import hmac import", "of missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True) if status == 200 and", "json import time import asyncio import hashlib import traceback from datetime import datetime,", ">= 500 and status <= 599 and status != 504) or (status in", "'marketName={}&tickInterval={}', 'auth': False }, 'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth': False },", "configuration import aiohttp config = configuration.config \"\"\" Global configuration. \"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'", "response. On a non-200 response, the raw response body (may be None). On", "retry = True else: self.log.error('Got non-retryable status {}.', status) data = await response.text()", "''.join(traceback.format_tb(ex.__traceback__))) retry = False elif ex is not None: reason = await Client._get_extract_failure_reason(ex,", "if no issue occurred. \"\"\" if isinstance(ex, (TypeError, IndexError, KeyError)): reason = await", "headers = None return (url, headers) async def call_json(self, method: str, params: list=None):", "# pylint: disable=W0123 results.append(expr_func(data)) except (TypeError, IndexError, KeyError, SyntaxError, NameError) as e: ex", "of the response data items to extract, eg. 
[\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name of", "body (invalid JSON), or missing 'success' value. Arguments: method: Name of the API", "!= 200 or results is None or not results[0]: self.log.error(\"Failed getting order: params{},", "items that failed to be extracted after exhausting all retries, or had syntax", "the request, or None if no headers are required. \"\"\" query = API_METHODS[method]['params'].format(*params", "response' except KeyError: retry_reason = \"missing 'success' value\" retry = True if retry:", "NameError)): reason = \"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry = False elif ex is", "= summary['Summary']['MarketName'] active = summary['Market']['IsActive'] notice = summary['Market']['Notice'] last = summary['Summary']['Last'] prev_day =", "data = None url, headers = await self._get_request_data(method, params) while attempt < config['http_max_retries']:", "'getOrder', params=params, log=True, retry_data=True) if status != 200 or results is None or", "self.lock.release() return self.cache['marketSummariesV1']['data'] else: self.lock.release() return None self.cache['marketSummariesV1'] = { 'time': time.time(), 'data':", "missing results' return \"{} ({}: {})\".format(api_message, type(ex).__name__, ex) async def get_market_summaries(self) -> List[Dict[str,", "false (blank message)\" except KeyError: reason = \"success == false (missing message)\" if", "raw response body (may be None). On a 200 response with a missing", "from the given extraction exception and API response message (if present). Arguments: data:", "\"\"\" ex = None results = [] for item in extract: try: expr", "This is optional as some responses can be quite large. Returns: (tuple): A", "(bool): True if the exception warrants a retry, False if no error or", "None if an error occurred. 
float: The current 24 hour volume, or None", "'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last': last, }", "'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time'] = time.time() self.lock.release() return self.cache['marketSummariesV1']['data'] else: self.lock.release() return None", "Exception, data: Dict[str, Any], retry_data: bool): \"\"\" Handle any exception produced from an", "'oneMin' elif config['tick_interval_secs'] == 300: self.tick_interval_str = 'fiveMin' else: raise ValueError(\"Unsupported tick interval:", "code. A value of 0 indicates a connection or transport failure. \"\"\" retry", "-> List[Dict[str, Any]]: \"\"\" Get a range of ticks (closing values and closing", "status {}, results {}.\", params, status, results) return None return results[0] async def", "retry on explicit failure response from the API. log: If True, will log", "Exception, data: Dict[str, Any]): \"\"\" Get the failure reason from the given extraction", "summaries dict. \"\"\" results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To retry", "api_secret = config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key, nonce) + query url = API_URL.format(API_METHODS[method]['path'], query)", "if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_extract {}\".format(method), reason) retry =", "Returns: (tuple): A tuple containing: data (object): On success, a dict containing the", "!= 200: return (raw_data, status) if raw_data is None: retry_reason = \"'None' on", "if retry_data and data['success']: retry = True else: retry = False elif isinstance(ex,", "is None or results[0] is None: self.log.error(\"Failed executing sell order request: params {},", "retry=False (the default) first. Arguments: extract: A list of strings representing the dictionary", "response. 
Implements retry and exponential backoff for invalid data items. Caution must be", "= await self._extract_items(extract, data) retry, reason = await self._handle_extract_exception(ex, data, retry_data) if retry:", "await self.call_extract([ \"['success']\" ], 'cancelOrder', params=params, log=True, retry_data=True) if status != 200 or", "False }, 'getTicker': { 'path': 'v1.1/public/getticker', 'params': 'market={}', 'auth': False }, 'buyLimit': {", "due to pending removal.\", pair) active = False summaries[pair] = { 'active': active,", "to call. params: Values of query parameters to pass to the method. retry_data:", "True }, 'getOrder': { 'path': 'v1.1/account/getorder', 'params': 'uuid={}', 'auth': True }, 'getBalance': {", "self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To retry on any missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\",", "response message (if present). Arguments: data: Dict of the parsed API response. ex:", "get_last_values(self, pair: str) -> Tuple[float, float]: \"\"\" Get the last price and 24-hour", "on API method '{}({})': status {}, data {}\", method, params, status, data) return", "while attempt < config['http_max_retries']: try: async with self.session.get(url, headers=headers) as response: status =", "'auth': True }, } class Client(api.Client): \"\"\" Client for interacting with the Bittrex", "params, status, results) return None return { 'open': results[1], 'quantity': results[2], 'remaining': results[3],", "logger. \"\"\" self.lock = asyncio.Lock() \"\"\" Lock used for syncing access to API", "self.log.error(\"Failed executing cancel order request: params {} status {}, results {}.\", params, status,", "dictionary paths of the response data items to extract, eg. 
[\"['result'][0]['C']\", \"['result'][0]['T']\"] method:", "hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers = {'apisign': signature} else: url = API_URL.format(API_METHODS[method]['path'], query) headers", "None: self.log.error(\"Failed executing buy order request: params {}, status {}, results {}.\", params,", "True, will perform backoff and retry on empty or missing data items. Syntax", "interval. \"\"\" if config['tick_interval_secs'] == 60: self.tick_interval_str = 'oneMin' elif config['tick_interval_secs'] == 300:", "parsed JSON response. On a non-200 response, the raw response body (may be", "\"\"\" \"\"\" params = [order_id] results, status = await self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\",", "for result in results[0]: market_summaries[result['MarketName']] = result else: self.log.error(\"Failed getting v1 market summaries:", "is None or results[0] is None: self.log.error(\"Failed getting balance: params {}, status {},", "to a dict for faster lookups. This data is used for batching tick", "'market={}', 'auth': False }, 'buyLimit': { 'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True },", "to pass to the method. 
Returns: (tuple): A tuple containing: data (str): The", "last if notice: self.log.info(\"{} NOTICE: {}\", pair, notice) if 'will be removed' in", "order_id: str): \"\"\" \"\"\" params = [order_id] results, status = await self.call_extract([ \"['success']\",", "not supported by the Bittrex API.\") async def get_last_values(self, pair: str) -> Tuple[float,", "pair) active = False summaries[pair] = { 'active': active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'],", "or results is None or results[0] is None: self.log.error(\"Failed getting market summaries: status", "{ 'path': 'v1.1/public/getticker', 'params': 'market={}', 'auth': False }, 'buyLimit': { 'path': 'v1.1/market/buylimit', 'params':", "300: self.tick_interval_str = 'fiveMin' else: raise ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs'])) async def call(self,", "of the extraction attempt. \"\"\" if 'message' in data and data['message'] and data['message']", "= summary['Summary']['PrevDay'] if not prev_day: prev_day = last if notice: self.log.info(\"{} NOTICE: {}\",", "-> List[Dict[str, Any]]: \"\"\" Get ticks (closing values and closing times) for a", "headers are required. \"\"\" query = API_METHODS[method]['params'].format(*params or []) if API_METHODS[method]['auth']: nonce =", "not at least one element exists \"['result'][0]['T']\" ], 'getTicks', params=params, retry_data=True, retry_fail=True) if", "extract dict paths contains invalid syntax. 
\"\"\" retry = False attempt = 0", "return (data, status) except json.JSONDecodeError: retry_reason = 'invalid JSON response' except KeyError: retry_reason", "one element exists \"['result'][0]['T']\" ], 'getTicks', params=params, retry_data=True, retry_fail=True) if status != 200", "], 'cancelOrder', params=params, log=True, retry_data=True) if status != 200 or results is None", "order: params{}, status {}, results {}.\", params, status, results) return None return {", "market_summaries[pair]['BaseVolume']) async def buy_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\" params", "= e results.append(None) return (results, ex) @staticmethod async def _handle_extract_exception(ex: Exception, data: Dict[str,", "keys are correct to avoid repeating of non-idempotent operations (such as buying or", "pass to the method. Returns: (tuple): A tuple containing: data (object): On success,", "quite large. Returns: (tuple): A tuple containing: data (object): On a normal 200", "params {}, status {}, results {}.\", params, status, results) return None for tick", "data = json.loads(raw_data) _ = data['success'] return (data, status) except json.JSONDecodeError: retry_reason =", "data, status = await self.call_json(method, params) if status != 200 or data is", "self.tick_interval_str: str \"\"\" String representation of the configured tick interval. \"\"\" if config['tick_interval_secs']", "Sequence, Tuple import api import utils import common import configuration import aiohttp config", "self.log = utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object logger. \"\"\" self.lock = asyncio.Lock() \"\"\" Lock", "results[0] is None: self.log.error(\"Failed getting ticks: params {}, status {}, results {}.\", params,", "str, quantity: float, value: float): \"\"\" \"\"\" params = [pair, quantity, value] results,", "a given API method and parameter list. 
Forms the full URL with query", "utils import common import configuration import aiohttp config = configuration.config \"\"\" Global configuration.", "reason) return (tuple(results), status) @staticmethod async def _extract_items(extract: Sequence[str], data: Dict[str, Any]): \"\"\"", "Exception: The last exception that occurred during extraction, or None if no exception", "extraction exception and API response message (if present). Arguments: data: Dict of the", "if 'message' in data and data['message'] and data['message'] != '': api_message = data['message']", "no error or and unretryable error occurred. (str): Sentence fragment or formatted traceback", "data for marketSummariesV1.\", verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data'] results, status = await self.call_extract([ \"['result']\",", "transport failure. Raises: SyntaxError, NameError: If one or more of the passed extract", "module. \"\"\" __author__ = '<NAME> <$(echo nqnz.enshfr#tznvy.pbz | tr a-z# n-za-m@)>' __version__ =", "on successful response\" retry = True if not retry: try: data = json.loads(raw_data)", "'success' value\" retry = True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex", "Arguments: method: Name of the API method to call. params: Values of query", "result else: self.log.error(\"Failed getting v1 market summaries: status {}, results {}.\", status, results)", "status <= 599 and status != 504) or (status in [0, 408, 429]):", "Full URL for the request. (dict): Dictionary of headers for the request, or", "the API. Arguments: pair: Currency pair name eg. 
'BTC-ETH' Returns: (tuple): A tuple", "'auth': True }, 'cancelOrder': { 'path': 'v1.1/market/cancel', 'params': 'uuid={}', 'auth': True }, 'getOrder':", "}, 'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getTicker': { 'path':", "{}\", reason) return (tuple(results), status) @staticmethod async def _extract_items(extract: Sequence[str], data: Dict[str, Any]):", "in results[0]: market_summaries[result['MarketName']] = result else: self.log.error(\"Failed getting v1 market summaries: status {},", "status = 0 data = None while attempt < config['http_max_retries']: raw_data, status =", "True if the exception warrants a retry, False if no error or and", "or None if an error occurred. float: The current 24 hour volume, or", "paths of the response data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary", "more of the passed extract dict paths contains invalid syntax. \"\"\" retry =", "= await self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder', params=params, log=True,", "await self._extract_items(extract, data) retry, reason = await self._handle_extract_exception(ex, data, retry_data) if retry: attempt", "NameError: If one or more of the passed extract dict paths contains invalid", "'active': active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay':", "} return summaries async def get_ticks(self, pair: str, length: int=None) -> List[Dict[str, Any]]:", "self.log.info(\"{} NOTICE: {}\", pair, notice) if 'will be removed' in notice or 'will", "level error conditions. Arguments: method: Name of the API method to call. 
params:", "results is None or results[0] is None: self.log.error(\"Failed executing sell order request: params", "conditions. Arguments: method: Name of the API method to call. params: Values of", "async def _extract_items(extract: Sequence[str], data: Dict[str, Any]): \"\"\" Extract items from a dictionary", "given API method and parameter list. Forms the full URL with query string", "string and calculates any needed HMAC signature to be passed in headers. Arguments:", "return None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def buy_limit(self, pair: str, quantity: float, value:", "used for batching tick updates, since the v1 API is kept current (unlike", "hashlib.sha512).hexdigest() headers = {'apisign': signature} else: url = API_URL.format(API_METHODS[method]['path'], query) headers = None", "attempt = 0 status = 0 data = None while attempt < config['http_max_retries']:", "one or more of the passed extract dict paths contains invalid syntax. \"\"\"", "a dict containing the parsed JSON response. On a non-200 response, the raw", "\"\"\" Get the failure reason from the given extraction exception and API response", "get_ticks(self, pair: str, length: int=None) -> List[Dict[str, Any]]: \"\"\" Get ticks (closing values", "self.log.error(\"Failed executing buy order request: params {}, status {}, results {}.\", params, status,", "retry = True try: reason = data['message'] if data['message'] != '' else \"success", "Any], retry_data: bool): \"\"\" Handle any exception produced from an extract operation. 
Arguments:", "< config['http_max_retries']: try: async with self.session.get(url, headers=headers) as response: status = response.status if", "if not at least one element exists \"['result'][0]['T']\" ], 'getTicks', params=params, retry_data=True, retry_fail=True)", "(tuple): A tuple containing: data (object): On success, a dict containing the parsed", "return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def buy_limit(self, pair: str, quantity: float, value: float): \"\"\"", "self.log.error(\"Failed getting ticks: params {}, status {}, results {}.\", params, status, results) return", "an error occurred or no ticks are available. \"\"\" params = [pair, self.tick_interval_str]", "(market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def buy_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\"", "\"\"\" Call a Bittrex API method. Implements retry and exponentional backoff for HTTP", "'scheduled for delisting' in notice: self.log.info(\"{} marked as inactive due to pending removal.\",", "price, or None if an error occurred. float: The current 24 hour volume,", "raise ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs'])) async def call(self, method: str, params: Sequence[Any]=None): \"\"\"", "API method and parameter list. Forms the full URL with query string and", "'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last':", "response\" retry = True if not retry: try: data = json.loads(raw_data) _ =", "a connection or transport failure. Raises: SyntaxError, NameError: If one or more of", "Returns: The market summaries dict. \"\"\" results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\",", "unretryable error occurred. 
(str): Sentence fragment or formatted traceback describing the reason for", "response: status = response.status if status >= 200 and status <= 399: data", "= hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers = {'apisign': signature} else: url = API_URL.format(API_METHODS[method]['path'], query)", "time.time() self.lock.release() return self.cache['marketSummariesV1']['data'] else: self.lock.release() return None self.cache['marketSummariesV1'] = { 'time': time.time(),", "'params': 'market={}', 'auth': False }, 'buyLimit': { 'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True", "prev_day: prev_day = last if notice: self.log.info(\"{} NOTICE: {}\", pair, notice) if 'will", "'minTradeSize': 0.0, 'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last': last, } return summaries", "occurred. \"\"\" market_summaries = await self._get_market_summaries_v1() if market_summaries is None: return None return", "datetime, timezone from typing import Any, Dict, List, Sequence, Tuple import api import", "results[0]: self.log.error(\"Failed getting order: params{}, status {}, results {}.\", params, status, results) return", "response status code. A value of 0 indicates a connection or transport failure.", "if reason is not None: self.log.error(\"Giving up on: {}\", reason) return (tuple(results), status)", "return \"{} ({}: {})\".format(api_message, type(ex).__name__, ex) async def get_market_summaries(self) -> List[Dict[str, Any]]: \"\"\"", "empty response body, malformed response body (invalid JSON), or missing 'success' value. 
Arguments:", "tr a-z# n-za-m@)>' __version__ = \"0.2.0\" __all__ = ['Client'] import hmac import json", "or results is None or results[0] is None: self.log.error(\"Failed getting balance: params {},", "if time.time() - self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning cached data for marketSummariesV1.\", verbosity=1) self.lock.release()", "import asyncio import hashlib import traceback from datetime import datetime, timezone from typing", "data: Dict[str, Any]): \"\"\" Extract items from a dictionary of data. Arguments: extract:", "data (object): On success, a dict containing the parsed JSON response. On a", "order request: params {}, status {}, results {}.\", params, status, results) return None", "Not supported by the API, will always return all ticks. Returns: A list", "{} status {}, results {}.\", params, status, results) return None return results[0] async", "exception occurred. \"\"\" ex = None results = [] for item in extract:", "{ 'path': 'v1.1/account/getorder', 'params': 'uuid={}', 'auth': True }, 'getBalance': { 'path': 'v1.1/account/getbalance', 'params':", "last price and 24-hour volume for a currency pair from the API. Arguments:", "params = [pair, quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'sellLimit',", "True, will log the API JSON response. 
This is optional as some responses", "the full URL with query string and calculates any needed HMAC signature to", "str): \"\"\" \"\"\" params = [order_id] results, status = await self.call_extract([ \"['success']\", \"['result']['IsOpen']\",", "= 'apikey={}&nonce={}&'.format(api_key, nonce) + query url = API_URL.format(API_METHODS[method]['path'], query) signature = hmac.new(api_secret.encode(), url.encode(),", "{}.', status) data = await response.text() break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as e:", "tick interval: {}\".format(config['tick_interval_secs'])) async def call(self, method: str, params: Sequence[Any]=None): \"\"\" Call a", "data to extract items from. Returns: (tuple): A tuple containing: list: Result of", "Raises: SyntaxError, NameError: If one or more of the passed extract dict paths", "for item in extract: try: expr = 'lambda d: d' + item expr_func", "thrown as a result of the extraction attempt. \"\"\" if 'message' in data", "set to None. On a non-200 response, the raw response body (may be", "async def get_order(self, pair: str, order_id: str): \"\"\" \"\"\" params = [order_id] results,", "parsed API response. ex: Exception thrown as a result of the extraction attempt.", "from an extract operation. Arguments: ex: Exception returned from :meth:`_extract_items`. data: Dictionary of", "self.lock.acquire() if 'marketSummariesV1' in self.cache: if time.time() - self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning cached", "JSON response. 
Implements retry and exponential backoff for higher-level API error conditions on", "notice or 'will be delisted' in notice or 'scheduled for delisting' in notice:", "elif isinstance(ex, (SyntaxError, NameError)): reason = \"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry = False", "= 'fiveMin' else: raise ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs'])) async def call(self, method: str,", "the raw response body (may be None). On a response with a missing", "items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary of data to extract items", "{}\".format(method), retry_reason) retry = False return (data, status) @staticmethod async def _get_request_data(method: str,", "self.log.debug(\"API method '{}({})' response:\\n{}\", method, params, json.dumps(data, indent=2)) if not data['success'] and retry_fail:", "= utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object logger. \"\"\" self.lock = asyncio.Lock() \"\"\" Lock used", "not data['success'] and retry_fail: retry = True try: reason = data['message'] if data['message']", "the failure reason from the given extraction exception and API response message (if", "status != 504) or (status in [0, 408, 429]): retry_reason = 'status {}'.format(status)", "occurred. (str): Sentence fragment or formatted traceback describing the reason for retry or", "Any, Dict, List, Sequence, Tuple import api import utils import common import configuration", "{}, results {}.\", params, status, results) return None return results[0] async def get_balance(self,", "list=None): \"\"\" Call a Bittrex API method and parse JSON response. Implements retry", "_ = data['success'] return (data, status) except json.JSONDecodeError: retry_reason = 'invalid JSON response'", "to pass to the method. 
retry_data: If True, will perform backoff and retry", "the request URL and headers for a given API method and parameter list.", "config['tick_interval_secs'] == 300: self.tick_interval_str = 'fiveMin' else: raise ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs'])) async", "results) return None return results[0] async def sell_limit(self, pair: str, quantity: float, value:", "results) return None summaries = {} for summary in results[0]: pair = summary['Summary']['MarketName']", "'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth': False }, 'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries', 'params': '', 'auth':", "'success' value. Arguments: method: Name of the API method to call. params: Values", "or None if an error occurred or no ticks are available. \"\"\" params", "import hashlib import traceback from datetime import datetime, timezone from typing import Any,", "the response data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name of the", "pass to the method. Returns: (tuple): A tuple containing: data (str): The raw", "configuration.config \"\"\" Global configuration. \"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS =", "for a given API method and parameter list. Forms the full URL with", "to :meth:`_extract_items`. retry_data: True if missing data should be retried, false otherwise. Returns:", "-*- \"\"\" Bittrex API module. \"\"\" __author__ = '<NAME> <$(echo nqnz.enshfr#tznvy.pbz | tr", "body, None. status (int): The HTTP response status code. 
A value of 0", "= [order_id] results, status = await self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\",", "retry = True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call {}\".format(method),", "{}.\", status, results) return None summaries = {} for summary in results[0]: pair", "or results is None or results[0] is None: self.log.error(\"Failed executing sell order request:", "retry if not at least one element exists \"['result'][0]['T']\" ], 'getTicks', params=params, retry_data=True,", "(the default) first. Arguments: extract: A list of strings representing the dictionary paths", "the configured tick interval. \"\"\" if config['tick_interval_secs'] == 60: self.tick_interval_str = 'oneMin' elif", "for retry or error, or None if no issue occurred. \"\"\" if isinstance(ex,", "= config['bittrex_api_key'] api_secret = config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key, nonce) + query url =", "in extract paths will not be retried. retry_fail: If True, will perform backoff", "is None: retry_reason = \"'None' on successful response\" retry = True if not", "\"['result'][0]['C']\", # To retry if not at least one element exists \"['result'][0]['T']\" ],", "\"\"\" Extract items from a dictionary of data. Arguments: extract: List of strings", "of 0 indicates a connection or transport failure. \"\"\" retry = False attempt", "length: int=None) -> List[Dict[str, Any]]: \"\"\" Get ticks (closing values and closing times)", "if an error occurred. \"\"\" market_summaries = await self._get_market_summaries_v1() if market_summaries is None:", "available. 
\"\"\" params = [pair, self.tick_interval_str] results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['C']\",", "balance = results[0] self.cache['balance'][base] = { 'time': time.time(), 'data': balance } return balance", "data (object): On a normal 200 response, a tuple containing the values for", "removed' in notice or 'will be delisted' in notice or 'scheduled for delisting'", "reason for retry or error, or None if no issue occurred. \"\"\" if", "that failed to be extracted after exhausting all retries, or had syntax errors", "extraction error occurred. Exception: The last exception that occurred during extraction, or None", "Sequence[str], method: str, params: Sequence[Any]=None, retry_data=False, retry_fail=False, log=False): \"\"\" Call a Bittrex API", "try: expr = 'lambda d: d' + item expr_func = eval(expr) # pylint:", "@staticmethod async def _handle_extract_exception(ex: Exception, data: Dict[str, Any], retry_data: bool): \"\"\" Handle any", "= ['Client'] import hmac import json import time import asyncio import hashlib import", "self.tick_interval_str = 'fiveMin' else: raise ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs'])) async def call(self, method:", "the last price and 24-hour volume for a currency pair from the API.", "an error occurred. float: The current 24 hour volume, or None if an", "True else: self.log.error('Got non-retryable status {}.', status) data = await response.text() break except", "If one or more of the passed extract dict paths contains invalid syntax.", "call_extract {}\".format(method), reason) retry = False else: break if reason is not None:", "ticks are available. \"\"\" params = [pair, self.tick_interval_str] results, status, = await self.call_extract([", "\"\"\" Object logger. 
\"\"\" self.lock = asyncio.Lock() \"\"\" Lock used for syncing access", "API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS = { 'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth':", "params=params, log=True, retry_data=True) if status != 200 or results is None or not", "'{}({})' response:\\n{}\", method, params, json.dumps(data, indent=2)) if not data['success'] and retry_fail: retry =", "attempt += 1 await common.backoff(attempt, \"Bittrex call_extract {}\".format(method), reason) retry = False else:", "The current 24 hour volume, or None if an error occurred. \"\"\" market_summaries", "data['message'] and data['message'] != '': api_message = data['message'] else: api_message = 'empty or", "def _extract_items(extract: Sequence[str], data: Dict[str, Any]): \"\"\" Extract items from a dictionary of", "results[0] is None: self.log.error(\"Failed getting balance: params {}, status {}, results {}.\", params,", "None: return None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def buy_limit(self, pair: str, quantity: float,", "will always return all ticks. 
Returns: A list of the raw tick data", "elif ex is not None: reason = await Client._get_extract_failure_reason(ex, data) retry = False", "= summary['Market']['Notice'] last = summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay'] if not prev_day: prev_day =", "= { 'time': time.time(), 'data': balance } return balance async def _get_market_summaries_v1(self): \"\"\"", "{}.\", status, results) if 'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time'] = time.time() self.lock.release() return self.cache['marketSummariesV1']['data']", "is None or results[0] is None: self.log.error(\"Failed executing cancel order request: params {}", "\"['result'][0]['Last']\", # For retry of missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True) if", "Bittrex API method and parse JSON response. Implements retry and exponential backoff for", "await self.call_extract([ \"['result']\", \"['result'][0]['C']\", # To retry if not at least one element", "else: self.log.error(\"Failed getting v1 market summaries: status {}, results {}.\", status, results) if", "status, = await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To retry on any missing fields", "active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day,", "results {}.\", params, status, results) return None balance = results[0] self.cache['balance'][base] = {", "'getBalance', params=params, log=True, retry_data=True) if status != 200 or results is None or", "Any items that failed to be extracted after exhausting all retries, or had", "body (may be None). 
On a 200 response with a missing response body,", "+ item expr_func = eval(expr) # pylint: disable=W0123 results.append(expr_func(data)) except (TypeError, IndexError, KeyError,", "- self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning cached data for marketSummariesV1.\", verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data']", "extract items from. Returns: (tuple): A tuple containing: list: Result of each extracted", "JSON), or missing 'success' value. Arguments: method: Name of the API method to", "'sellLimit': { 'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'cancelOrder': { 'path': 'v1.1/market/cancel',", "results {}.\", params, status, results) return None return { 'open': results[1], 'quantity': results[2],", "500 and status <= 599 and status != 504) or (status in [0,", "results) return None return results[0] async def get_balance(self, base: str): \"\"\" \"\"\" params", "\"\"\" Call a Bittrex API method and extract data items from its JSON", "await self.call(method, params) if status != 200: return (raw_data, status) if raw_data is", "self.log.error(\"Giving up on: {}\", reason) return (tuple(results), status) @staticmethod async def _extract_items(extract: Sequence[str],", "= 'lambda d: d' + item expr_func = eval(expr) # pylint: disable=W0123 results.append(expr_func(data))", "in extract: try: expr = 'lambda d: d' + item expr_func = eval(expr)", "from typing import Any, Dict, List, Sequence, Tuple import api import utils import", "'uuid={}', 'auth': True }, 'getBalance': { 'path': 'v1.1/account/getbalance', 'params': 'currency={}', 'auth': True },", "False }, 'buyLimit': { 'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'sellLimit': {", "str) -> Tuple[float, float]: \"\"\" Get the last price and 24-hour volume for", "!= 200 or results is None or results[0] is None: self.log.error(\"Failed executing 
cancel", "results {}.\", params, status, results) return None for tick in results[0]: close_datetime =", "'auth': False }, 'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getTicker':", "type(ex).__name__, ex) async def get_market_summaries(self) -> List[Dict[str, Any]]: \"\"\" Get the market summaries", "The market summaries dict. \"\"\" results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", #", "return self.cache['marketSummariesV1']['data'] else: self.lock.release() return None self.cache['marketSummariesV1'] = { 'time': time.time(), 'data': market_summaries", "failure. Raises: SyntaxError, NameError: If one or more of the passed extract dict", "{ 'path': 'v1.1/public/getMarketSummaries', 'params': '', 'auth': False }, 'getTicks': { 'path': 'v2.0/pub/market/getTicks', 'params':", "String representation of the configured tick interval. \"\"\" if config['tick_interval_secs'] == 60: self.tick_interval_str", "\"\"\" Get the market summaries from the Bittrex API. Returns: The market summaries", "and exponentional backoff for HTTP level error conditions. Arguments: method: Name of the", "of the configured tick interval. \"\"\" if config['tick_interval_secs'] == 60: self.tick_interval_str = 'oneMin'", "items from a dictionary of data. Arguments: extract: List of strings representing the", "elif config['tick_interval_secs'] == 300: self.tick_interval_str = 'fiveMin' else: raise ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs']))", "results[0] async def get_order(self, pair: str, order_id: str): \"\"\" \"\"\" params = [order_id]", "call_extract(self, extract: Sequence[str], method: str, params: Sequence[Any]=None, retry_data=False, retry_fail=False, log=False): \"\"\" Call a", "v1 API is kept current (unlike v2). 
\"\"\" await self.lock.acquire() if 'marketSummariesV1' in", "reason = data['message'] if data['message'] != '' else \"success == false (blank message)\"", "containing: (str): Full URL for the request. (dict): Dictionary of headers for the", "1 await common.backoff(attempt, \"Bittrex call_extract {}\".format(method), reason) retry = False else: break if", "of 0 indicates a connection or transport failure. Raises: SyntaxError, NameError: If one", "!= 200 or results is None or results[0] is None: self.log.error(\"Failed executing sell", "if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call {}\".format(method), retry_reason) retry =", "A tuple containing: data (object): On success, a dict containing the parsed JSON", "504) or (status in [0, 408, 429]): retry_reason = 'status {}'.format(status) retry =", "Tuple[float, float]: \"\"\" Get the last price and 24-hour volume for a currency", "+ query url = API_URL.format(API_METHODS[method]['path'], query) signature = hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers =", "return None return results[0] async def sell_limit(self, pair: str, quantity: float, value: float):", "if status != 200 or data is None: self.log.error(\"Failed on API method '{}({})':", "async def get_ticks(self, pair: str, length: int=None) -> List[Dict[str, Any]]: \"\"\" Get ticks", "config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key, nonce) + query url = API_URL.format(API_METHODS[method]['path'], query) signature =", "occurred or no ticks are available. \"\"\" params = [pair, self.tick_interval_str] results, status,", "data) return (data, status) if log: self.log.debug(\"API method '{}({})' response:\\n{}\", method, params, json.dumps(data,", "pair name eg. 'BTC-ETH' Returns: (tuple): A tuple containing: float: The current close", "Arguments: data: Dict of the parsed API response. ex: Exception thrown as a", "on explicit failure response from the API. 
log: If True, will log the", "= True try: reason = data['message'] if data['message'] != '' else \"success ==", "error occurred. (str): Sentence fragment or formatted traceback describing the reason for retry", "marked as inactive due to pending removal.\", pair) active = False summaries[pair] =", "parse JSON response. Implements retry and exponential backoff for higher-level API error conditions", "the parsed API response. ex: Exception thrown as a result of the extraction", "currency pair from the API. Arguments: pair: Currency pair name eg. 'BTC-ETH' Returns:", "common import configuration import aiohttp config = configuration.config \"\"\" Global configuration. \"\"\" TIME_FORMAT", "tuple containing: data (str): The raw HTTP response body (may be None). status", "retry_fail=True) if status != 200 or results is None or results[0] is None:", "Bittrex API module. \"\"\" __author__ = '<NAME> <$(echo nqnz.enshfr#tznvy.pbz | tr a-z# n-za-m@)>'", "large. Returns: (tuple): A tuple containing: data (object): On a normal 200 response,", "False }, 'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries', 'params': '', 'auth': False }, 'getTicks': {", "and data['message'] != '': api_message = data['message'] else: api_message = 'empty or missing", "response. ex: Exception thrown as a result of the extraction attempt. \"\"\" if", "if an error occurred or no ticks are available. \"\"\" params = [pair,", "config['http_max_retries']: try: async with self.session.get(url, headers=headers) as response: status = response.status if status", "malformed response body (invalid JSON), or missing 'success' value. Arguments: method: Name of", "failed to be extracted after exhausting all retries, or had syntax errors in", "to the method. 
Returns: (tuple): A tuple containing: data (str): The raw HTTP", "fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True) if status == 200 and results is", "_get_market_summaries_v1(self): \"\"\" Get v1 market summaries from the API, cached for the current", "a-z# n-za-m@)>' __version__ = \"0.2.0\" __all__ = ['Client'] import hmac import json import", "to be extracted after exhausting all retries, or had syntax errors in extract", "get_order(self, pair: str, order_id: str): \"\"\" \"\"\" params = [order_id] results, status =", "{}, results {}.\", params, status, results) return None for tick in results[0]: close_datetime", "\"\"\" \"\"\" params = [pair, quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\",", "specifically empty response body, malformed response body (invalid JSON), or missing 'success' value.", "the specified extract dict keys are correct to avoid repeating of non-idempotent operations", "for tick in results[0]: close_datetime = datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0]", "+= 1 await common.backoff(attempt, \"Bittrex call {}\".format(method), retry_reason) retry = False return (data,", "API module. \"\"\" __author__ = '<NAME> <$(echo nqnz.enshfr#tznvy.pbz | tr a-z# n-za-m@)>' __version__", "containing the parsed JSON response. 
On a non-200 response, the raw response body", "'path': 'v1.1/account/getorder', 'params': 'uuid={}', 'auth': True }, 'getBalance': { 'path': 'v1.1/account/getbalance', 'params': 'currency={}',", "correct to avoid repeating of non-idempotent operations (such as buying or selling) so", "200 or results is None or results[0] is None: self.log.error(\"Failed getting market summaries:", "import json import time import asyncio import hashlib import traceback from datetime import", "reason is not None: self.log.error(\"Giving up on: {}\", reason) return (tuple(results), status) @staticmethod", "is not None and results[0] is not None: market_summaries = {} for result", "await Client._get_extract_failure_reason(ex, data) retry = False else: reason = None retry = False", "'balance': {} } \"\"\" Response cache. \"\"\" self.tick_interval_str: str \"\"\" String representation of", "is used for batching tick updates, since the v1 API is kept current", "{}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry = False elif ex is not None: reason =", "def _handle_extract_exception(ex: Exception, data: Dict[str, Any], retry_data: bool): \"\"\" Handle any exception produced", "{}.\", params, status, results) return None balance = results[0] self.cache['balance'][base] = { 'time':", "KeyError)): reason = await Client._get_extract_failure_reason(ex, data) if retry_data and data['success']: retry = True", "return (url, headers) async def call_json(self, method: str, params: list=None): \"\"\" Call a", "API.\") async def get_last_values(self, pair: str) -> Tuple[float, float]: \"\"\" Get the last", "\"['result']['uuid']\", ], 'sellLimit', params=params, log=True, retry_data=True) if status != 200 or results is", "not None: reason = await Client._get_extract_failure_reason(ex, data) retry = False else: reason =", "result of the extraction attempt. 
\"\"\" if 'message' in data and data['message'] and", "[pair, self.tick_interval_str] results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['C']\", # To retry if", "def _get_market_summaries_v1(self): \"\"\" Get v1 market summaries from the API, cached for the", "success, a dict containing the parsed JSON response. On a non-200 response, the", "and 24-hour volume for a currency pair from the API. Arguments: pair: Currency", "Client(api.Client): \"\"\" Client for interacting with the Bittrex API. \"\"\" def __init__(self, session:", "results.append(None) return (results, ex) @staticmethod async def _handle_extract_exception(ex: Exception, data: Dict[str, Any], retry_data:", "}, } class Client(api.Client): \"\"\" Client for interacting with the Bittrex API. \"\"\"", "Dictionary of headers for the request, or None if no headers are required.", "{ 'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth': False }, 'getMarketSummariesV1': { 'path':", "params, status, data) return (data, status) if log: self.log.debug(\"API method '{}({})' response:\\n{}\", method,", "any needed HMAC signature to be passed in headers. Arguments: method: Name of", "'remaining': results[3], 'value': results[4], 'fees': results[5], } async def cancel_order(self, pair: str, order_id:", "a retry, False if no error or and unretryable error occurred. 
(str): Sentence", "operations (such as buying or selling) so should always be tested with retry=False", "Get the request URL and headers for a given API method and parameter", "results[0] is None: self.log.error(\"Failed executing cancel order request: params {} status {}, results", "params) while attempt < config['http_max_retries']: try: async with self.session.get(url, headers=headers) as response: status", "int(time.time() * 1000) api_key = config['bittrex_api_key'] api_secret = config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key, nonce)", "on empty or missing data items. Syntax errors in extract paths will not", "traceback from datetime import datetime, timezone from typing import Any, Dict, List, Sequence,", "operation. Arguments: ex: Exception returned from :meth:`_extract_items`. data: Dictionary of data passed to", "values and closing times) for a pair from the Bittrex API. Arguments: pair:", "contains invalid syntax. \"\"\" retry = False attempt = 0 while attempt <=", "log=False): \"\"\" Call a Bittrex API method and extract data items from its", "-> List[Dict[str, Any]]: \"\"\" Get the market summaries from the Bittrex API. Returns:", "API method and extract data items from its JSON response. Implements retry and", "200 or data is None: self.log.error(\"Failed on API method '{}({})': status {}, data", "!= 200 or results is None or results[0] is None: self.log.error(\"Failed executing buy", "the API, cached for the current tick interval. Converts the response list to", "method. Returns: (tuple): A tuple containing: data (object): On success, a dict containing", "__all__ = ['Client'] import hmac import json import time import asyncio import hashlib", "the API method to call. 
params: Values of query parameters to pass to", "'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'sellLimit': { 'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}',", "async def call_extract(self, extract: Sequence[str], method: str, params: Sequence[Any]=None, retry_data=False, retry_fail=False, log=False): \"\"\"", "data from the API, or None if an error occurred or no ticks", "by the Bittrex API.\") async def get_last_values(self, pair: str) -> Tuple[float, float]: \"\"\"", "None return results[0] async def get_order(self, pair: str, order_id: str): \"\"\" \"\"\" params", "retries, or had syntax errors in extract paths will be set to None.", "configuration. \"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS = { 'getMarketSummaries': {", "_get_request_data(method: str, params: Sequence[Any]=None): \"\"\" Get the request URL and headers for a", "False attempt = 0 while attempt <= config['api_max_retries']: data, status = await self.call_json(method,", "indicates a connection or transport failure. \"\"\" retry = False attempt = 0", "is None: self.log.error(\"Failed executing sell order request: params {}, status {}, results {}.\",", "'BTC-ETH'. length: Not supported by the API, will always return all ticks. Returns:", "\"\"\" self.lock = asyncio.Lock() \"\"\" Lock used for syncing access to API data.", "time.time(), 'data': balance } return balance async def _get_market_summaries_v1(self): \"\"\" Get v1 market", "= { 'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth': False }, 'getMarketSummariesV1': {", "= asyncio.Lock() \"\"\" Lock used for syncing access to API data. \"\"\" self.cache", "= 0 status = 0 data = None url, headers = await self._get_request_data(method,", "first. Arguments: extract: A list of strings representing the dictionary paths of the", "HMAC signature to be passed in headers. 
Arguments: method: Name of the API", "error or and unretryable error occurred. (str): Sentence fragment or formatted traceback describing", "= data['message'] if data['message'] != '' else \"success == false (blank message)\" except", "method, params, status, data) return (data, status) if log: self.log.debug(\"API method '{}({})' response:\\n{}\",", "missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True, retry_fail=True)", "{} for summary in results[0]: pair = summary['Summary']['MarketName'] active = summary['Market']['IsActive'] notice =", ":meth:`_extract_items`. data: Dictionary of data passed to :meth:`_extract_items`. retry_data: True if missing data", "all ticks. Returns: A list of the raw tick data from the API,", "A tuple containing: (bool): True if the exception warrants a retry, False if", "response data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary of data to", "def get_last_values(self, pair: str) -> Tuple[float, float]: \"\"\" Get the last price and", "errors in extract paths will be set to None. On a non-200 response,", "a 200 response with a missing response body, None. status (int): The HTTP", "items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name of the API method to", "\"\"\" params = [order_id] results, status = await self.call_extract([ \"['success']\" ], 'cancelOrder', params=params,", "Arguments: pair: Currency pair name eg. 'BTC-ETH' Returns: (tuple): A tuple containing: float:", "configured tick interval. 
\"\"\" if config['tick_interval_secs'] == 60: self.tick_interval_str = 'oneMin' elif config['tick_interval_secs']", "a normal 200 response, a tuple containing the values for each extracted item.", "self.log.debug(\"Returning cached data for marketSummariesV1.\", verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data'] results, status = await", "429]): retry_reason = 'status {}'.format(status) retry = True else: self.log.error('Got non-retryable status {}.',", "try: data = json.loads(raw_data) _ = data['success'] return (data, status) except json.JSONDecodeError: retry_reason", "to API data. \"\"\" self.cache = { 'balance': {} } \"\"\" Response cache.", "status {}.', status) data = await response.text() break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as", "List[Dict[str, Any]]: \"\"\" Get ticks (closing values and closing times) for a pair", "retry: attempt += 1 await common.backoff(attempt, \"Bittrex call {}\".format(method), retry_reason) retry = False", "retry_data=True) if status != 200 or results is None or not results[0]: self.log.error(\"Failed", "retry_data=True) if status != 200 or results is None or results[0] is None:", "results = [] for item in extract: try: expr = 'lambda d: d'", "= {} for result in results[0]: market_summaries[result['MarketName']] = result else: self.log.error(\"Failed getting v1", "This data is used for batching tick updates, since the v1 API is", "await self.call_extract([ \"['result']['uuid']\", ], 'sellLimit', params=params, log=True, retry_data=True) if status != 200 or", "str, length: int=None) -> List[Dict[str, Any]]: \"\"\" Get ticks (closing values and closing", "occurred. \"\"\" ex = None results = [] for item in extract: try:", "pair from the Bittrex API. 
\"\"\" raise NotImplementedError(\"Tick range not supported by the", "'getOrder': { 'path': 'v1.1/account/getorder', 'params': 'uuid={}', 'auth': True }, 'getBalance': { 'path': 'v1.1/account/getbalance',", "return (results, ex) @staticmethod async def _handle_extract_exception(ex: Exception, data: Dict[str, Any], retry_data: bool):", "= 'invalid JSON response' except KeyError: retry_reason = \"missing 'success' value\" retry =", "else: retry = False elif isinstance(ex, (SyntaxError, NameError)): reason = \"{}: {}\\n{}\".format(type(ex).__name__, ex,", "hashlib import traceback from datetime import datetime, timezone from typing import Any, Dict,", "(data, status) except json.JSONDecodeError: retry_reason = 'invalid JSON response' except KeyError: retry_reason =", "if 'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time'] = time.time() self.lock.release() return self.cache['marketSummariesV1']['data'] else: self.lock.release() return", "a range of ticks (closing values and closing times) for a pair from", "params: list=None): \"\"\" Call a Bittrex API method and parse JSON response. Implements", "{}.\", params, status, results) return None for tick in results[0]: close_datetime = datetime.strptime(tick['T'],", "None: self.log.error(\"Failed getting market summaries: status {}, results {}.\", status, results) return None", "results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To retry on any missing", "error occurred. float: The current 24 hour volume, or None if an error", "def get_tick_range(self, pair: str, start_time: float, end_time: float) -> List[Dict[str, Any]]: \"\"\" Get", "datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async def get_tick_range(self, pair: str, start_time:", "= False attempt = 0 while attempt <= config['api_max_retries']: data, status = await", "ticks. 
Returns: A list of the raw tick data from the API, or", "On success, a dict containing the parsed JSON response. On a non-200 response,", "Object HTTP client session. \"\"\" self.log = utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object logger. \"\"\"", "is optional as some responses can be quite large. Returns: (tuple): A tuple", "is None or results[0] is None: self.log.error(\"Failed executing buy order request: params {},", "def get_order(self, pair: str, order_id: str): \"\"\" \"\"\" params = [order_id] results, status", "A list of the raw tick data from the API, or None if", "of the parsed API response. ex: Exception thrown as a result of the", "reason = \"success == false (missing message)\" if not retry: results, ex =", "params{}, status {}, results {}.\", params, status, results) return None return { 'open':", "results[5], } async def cancel_order(self, pair: str, order_id: str): \"\"\" \"\"\" params =", "as some responses can be quite large. Returns: (tuple): A tuple containing: data", "float, value: float): \"\"\" \"\"\" params = [pair, quantity, value] results, status =", "balance } return balance async def _get_market_summaries_v1(self): \"\"\" Get v1 market summaries from", "(str): The raw HTTP response body (may be None). status (int): The HTTP", "of query parameters to pass to the method. 
retry_data: If True, will perform", "await self.call_extract([ \"['result']['uuid']\", ], 'buyLimit', params=params, log=True) if status != 200 or results", "async def _get_market_summaries_v1(self): \"\"\" Get v1 market summaries from the API, cached for", "data, retry_data) if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_extract {}\".format(method), reason)", "Arguments: extract: List of strings representing the dictionary paths of the response data", "base: str): \"\"\" \"\"\" params = [base] results, status = await self.call_extract([ \"['result']['Available']\",", "of each extracted path, or None if a syntax or or extraction error", "{}\".format(method), retry_reason) retry = False return (data, status) async def call_extract(self, extract: Sequence[str],", "\"\"\" self.tick_interval_str: str \"\"\" String representation of the configured tick interval. \"\"\" if", "None: self.log.error(\"Failed getting ticks: params {}, status {}, results {}.\", params, status, results)", "True if missing data should be retried, false otherwise. Returns: (tuple): A tuple", "executing sell order request: params {}, status {}, results {}.\", params, status, results)", "summaries: status {}, results {}.\", status, results) if 'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time'] =", "= 'status {}'.format(status) retry = True else: self.log.error('Got non-retryable status {}.', status) data", "200: return (raw_data, status) if raw_data is None: retry_reason = \"'None' on successful", "\"\"\" retry = False attempt = 0 while attempt <= config['api_max_retries']: data, status", "For retry of missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True) if status ==", "self.cache['marketSummariesV1']['data'] else: self.lock.release() return None self.cache['marketSummariesV1'] = { 'time': time.time(), 'data': market_summaries }", "current close price, or None if an error occurred. 
float: The current 24", "log: self.log.debug(\"API method '{}({})' response:\\n{}\", method, params, json.dumps(data, indent=2)) if not data['success'] and", "method. Implements retry and exponentional backoff for HTTP level error conditions. Arguments: method:", "status = await self.call(method, params) if status != 200: return (raw_data, status) if", "if not data['success'] and retry_fail: retry = True try: reason = data['message'] if", "= API_METHODS[method]['params'].format(*params or []) if API_METHODS[method]['auth']: nonce = int(time.time() * 1000) api_key =", "await self.call_json(method, params) if status != 200 or data is None: self.log.error(\"Failed on", "sell_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\" params = [pair, quantity,", "async with self.session.get(url, headers=headers) as response: status = response.status if status >= 200", "eval(expr) # pylint: disable=W0123 results.append(expr_func(data)) except (TypeError, IndexError, KeyError, SyntaxError, NameError) as e:", "status != 200 or data is None: self.log.error(\"Failed on API method '{}({})': status", "'auth': False }, 'getTicks': { 'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getLatestTick':", "will log the API JSON response. This is optional as some responses can", "isinstance(ex, (SyntaxError, NameError)): reason = \"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry = False elif", "length: Not supported by the API, will always return all ticks. 
Returns: A", "= {'apisign': signature} else: url = API_URL.format(API_METHODS[method]['path'], query) headers = None return (url,", "= 0 status = 0 data = None while attempt < config['http_max_retries']: raw_data,", "status = await self.call_extract([ \"['result']['uuid']\", ], 'sellLimit', params=params, log=True, retry_data=True) if status !=", "that the specified extract dict keys are correct to avoid repeating of non-idempotent", "float) -> List[Dict[str, Any]]: \"\"\" Get a range of ticks (closing values and", "headers. Arguments: method: Name of the API method to call. params: Values of", "(data, status) async def call_extract(self, extract: Sequence[str], method: str, params: Sequence[Any]=None, retry_data=False, retry_fail=False,", "of data passed to :meth:`_extract_items`. retry_data: True if missing data should be retried,", "'buyLimit': { 'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'sellLimit': { 'path': 'v1.1/market/selllimit',", ":meth:`_extract_items`. retry_data: True if missing data should be retried, false otherwise. Returns: (tuple):", "self.call_extract([ \"['result']['uuid']\", ], 'buyLimit', params=params, log=True) if status != 200 or results is", "signature} else: url = API_URL.format(API_METHODS[method]['path'], query) headers = None return (url, headers) async", "return None summaries = {} for summary in results[0]: pair = summary['Summary']['MarketName'] active", "updates, since the v1 API is kept current (unlike v2). \"\"\" await self.lock.acquire()", "or None if no headers are required. \"\"\" query = API_METHODS[method]['params'].format(*params or [])", "status {}, results {}.\", params, status, results) return None balance = results[0] self.cache['balance'][base]", "current (unlike v2). 
\"\"\" await self.lock.acquire() if 'marketSummariesV1' in self.cache: if time.time() -", "await self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder', params=params, log=True, retry_data=True)", "self.call_extract([ \"['result']\", \"['result'][0]['C']\", # To retry if not at least one element exists", "data: Dictionary of data passed to :meth:`_extract_items`. retry_data: True if missing data should", "retry = True else: retry = False elif isinstance(ex, (SyntaxError, NameError)): reason =", "await self.call_extract([ \"['result']\", \"['result'][0]['Last']\", # For retry of missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ],", "or formatted traceback describing the reason for retry or error, or None if", "path, or None if a syntax or or extraction error occurred. Exception: The", "# To retry on any missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\",", "API_URL.format(API_METHODS[method]['path'], query) signature = hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers = {'apisign': signature} else: url", "retry_reason = \"'None' on successful response\" retry = True if not retry: try:", "v1 market summaries from the API, cached for the current tick interval. Converts", "Bittrex API. 
\"\"\" def __init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session = session \"\"\" Object", "@staticmethod async def _get_request_data(method: str, params: Sequence[Any]=None): \"\"\" Get the request URL and", "TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async def get_tick_range(self, pair: str, start_time: float,", "delisted' in notice or 'scheduled for delisting' in notice: self.log.info(\"{} marked as inactive", "'data': balance } return balance async def _get_market_summaries_v1(self): \"\"\" Get v1 market summaries", "== 300: self.tick_interval_str = 'fiveMin' else: raise ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs'])) async def", "\"Bittrex call_extract {}\".format(method), reason) retry = False else: break if reason is not", "response, the raw response body (may be None). On a response with a", "raw response body (may be None). On a response with a missing response", "params {}, status {}, results {}.\", params, status, results) return None balance =", "return (data, status) if log: self.log.debug(\"API method '{}({})' response:\\n{}\", method, params, json.dumps(data, indent=2))", "ex = e results.append(None) return (results, ex) @staticmethod async def _handle_extract_exception(ex: Exception, data:", "__version__ = \"0.2.0\" __all__ = ['Client'] import hmac import json import time import", "in results[0]: close_datetime = datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async def", "None: retry_reason = \"'None' on successful response\" retry = True if not retry:", "= await Client._get_extract_failure_reason(ex, data) if retry_data and data['success']: retry = True else: retry", "async def get_tick_range(self, pair: str, start_time: float, end_time: float) -> List[Dict[str, Any]]: \"\"\"", "return None return { 'open': results[1], 'quantity': 
results[2], 'remaining': results[3], 'value': results[4], 'fees':", "params = [pair, self.tick_interval_str] results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['C']\", # To", "= [order_id] results, status = await self.call_extract([ \"['success']\" ], 'cancelOrder', params=params, log=True, retry_data=True)", "}, 'getBalance': { 'path': 'v1.1/account/getbalance', 'params': 'currency={}', 'auth': True }, } class Client(api.Client):", "self.log.error(\"Failed on API method '{}({})': status {}, data {}\", method, params, status, data)", "API. Arguments: pair: Currency pair name eg. 'BTC-ETH' Returns: (tuple): A tuple containing:", "raw_data is None: retry_reason = \"'None' on successful response\" retry = True if", "data: Dictionary of data to extract items from. Returns: (tuple): A tuple containing:", "except (TypeError, IndexError, KeyError, SyntaxError, NameError) as e: ex = e results.append(None) return", "'v1.1/market/cancel', 'params': 'uuid={}', 'auth': True }, 'getOrder': { 'path': 'v1.1/account/getorder', 'params': 'uuid={}', 'auth':", "# -*- coding: utf-8 -*- \"\"\" Bittrex API module. \"\"\" __author__ = '<NAME>", "except KeyError: retry_reason = \"missing 'success' value\" retry = True if retry: attempt", "extract: try: expr = 'lambda d: d' + item expr_func = eval(expr) #", "!= 200 or results is None or results[0] is None: self.log.error(\"Failed getting balance:", "in notice or 'will be delisted' in notice or 'scheduled for delisting' in", "{ 'balance': {} } \"\"\" Response cache. \"\"\" self.tick_interval_str: str \"\"\" String representation", "a 200 response, specifically empty response body, malformed response body (invalid JSON), or", "response.status if status >= 200 and status <= 399: data = await response.text()", "@staticmethod async def _get_extract_failure_reason(ex: Exception, data: Dict[str, Any]): \"\"\" Get the failure reason", "no issue occurred. 
\"\"\" if isinstance(ex, (TypeError, IndexError, KeyError)): reason = await Client._get_extract_failure_reason(ex,", "Dictionary of data to extract items from. Returns: (tuple): A tuple containing: list:", "asyncio.TimeoutError) as e: retry_reason = '{}: {}'.format(type(e).__name__, e) retry = True if retry:", "\"['result'][0]['T']\" ], 'getTicks', params=params, retry_data=True, retry_fail=True) if status != 200 or results is", "\"['result'][0]['T']\"] method: Name of the API method to call. params: Values of query", "on a 200 response, specifically empty response body, malformed response body (invalid JSON),", "(tuple): A tuple containing: data (str): The raw HTTP response body (may be", "= None url, headers = await self._get_request_data(method, params) while attempt < config['http_max_retries']: try:", "0.0, 'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last': last, } return summaries async", "summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last': last, } return", "'getTicks', params=params, retry_data=True, retry_fail=True) if status != 200 or results is None or", "response list to a dict for faster lookups. This data is used for", "results is None or results[0] is None: self.log.error(\"Failed getting ticks: params {}, status", "eg. 'BTC-ETH' Returns: (tuple): A tuple containing: float: The current close price, or", "Returns: (tuple): A tuple containing: (bool): True if the exception warrants a retry,", "request. (dict): Dictionary of headers for the request, or None if no headers", "as e: ex = e results.append(None) return (results, ex) @staticmethod async def _handle_extract_exception(ex:", "\"\"\" query = API_METHODS[method]['params'].format(*params or []) if API_METHODS[method]['auth']: nonce = int(time.time() * 1000)", "dict. 
\"\"\" results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To retry on", "invalid data items. Caution must be taken to ensure that the specified extract", "params = [order_id] results, status = await self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\",", "prev_day = summary['Summary']['PrevDay'] if not prev_day: prev_day = last if notice: self.log.info(\"{} NOTICE:", "(tuple): A tuple containing: data (object): On a normal 200 response, a tuple", "Sequence[str], data: Dict[str, Any]): \"\"\" Extract items from a dictionary of data. Arguments:", "1000) api_key = config['bittrex_api_key'] api_secret = config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key, nonce) + query", "query parameters to pass to the method. Returns: (tuple): A tuple containing: (str):", "body (may be None). On a response with a missing response body, None.", "(unlike v2). \"\"\" await self.lock.acquire() if 'marketSummariesV1' in self.cache: if time.time() - self.cache['marketSummariesV1']['time']", "self.lock.release() return None self.cache['marketSummariesV1'] = { 'time': time.time(), 'data': market_summaries } self.lock.release() return", "the API, will always return all ticks. Returns: A list of the raw", "since the v1 API is kept current (unlike v2). 
\"\"\" await self.lock.acquire() if", "= summary['Market']['IsActive'] notice = summary['Market']['Notice'] last = summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay'] if not", "query string and calculates any needed HMAC signature to be passed in headers.", "indent=2)) if not data['success'] and retry_fail: retry = True try: reason = data['message']", "attempt += 1 await common.backoff(attempt, \"Bittrex call {}\".format(method), retry_reason) retry = False return", "or results[0] is None: self.log.error(\"Failed executing sell order request: params {}, status {},", "if no error or and unretryable error occurred. (str): Sentence fragment or formatted", "await response.text() break if (status >= 500 and status <= 599 and status", "1 await common.backoff(attempt, \"Bittrex call_json {}\".format(method), retry_reason) retry = False return (data, status)", "], 'sellLimit', params=params, log=True, retry_data=True) if status != 200 or results is None", "(if present). Arguments: data: Dict of the parsed API response. ex: Exception thrown", "None: self.log.error(\"Failed executing cancel order request: params {} status {}, results {}.\", params,", "backoff for higher-level API error conditions on a 200 response, specifically empty response", "= False attempt = 0 status = 0 data = None url, headers", "summary['Summary']['PrevDay'] if not prev_day: prev_day = last if notice: self.log.info(\"{} NOTICE: {}\", pair,", "value of 0 indicates a connection or transport failure. \"\"\" retry = False", "except json.JSONDecodeError: retry_reason = 'invalid JSON response' except KeyError: retry_reason = \"missing 'success'", "call {}\".format(method), retry_reason) retry = False return (data, status) @staticmethod async def _get_request_data(method:", "results[0] async def get_balance(self, base: str): \"\"\" \"\"\" params = [base] results, status", "HTTP client session. \"\"\" self.log = utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object logger. 
\"\"\" self.lock", "from. Returns: (tuple): A tuple containing: list: Result of each extracted path, or", "'will be delisted' in notice or 'scheduled for delisting' in notice: self.log.info(\"{} marked", "common.backoff(attempt, \"Bittrex call {}\".format(method), retry_reason) retry = False return (data, status) @staticmethod async", "request: params {} status {}, results {}.\", params, status, results) return None return", "200 response, a tuple containing the values for each extracted item. Any items", "method and parameter list. Forms the full URL with query string and calculates", "lookups. This data is used for batching tick updates, since the v1 API", "for invalid data items. Caution must be taken to ensure that the specified", "(tuple): A tuple containing: (bool): True if the exception warrants a retry, False", "Arguments: pair: The currency pair eg. 'BTC-ETH'. length: Not supported by the API,", "(status >= 500 and status <= 599 and status != 504) or (status", "or results is None or results[0] is None: self.log.error(\"Failed executing cancel order request:", "response.text() break if (status >= 500 and status <= 599 and status !=", "method: str, params: list=None): \"\"\" Call a Bittrex API method and parse JSON", "or transport failure. Raises: SyntaxError, NameError: If one or more of the passed", "the Bittrex API. \"\"\" raise NotImplementedError(\"Tick range not supported by the Bittrex API.\")", "params: Sequence[Any]=None): \"\"\" Get the request URL and headers for a given API", "str): \"\"\" \"\"\" params = [base] results, status = await self.call_extract([ \"['result']['Available']\", ],", "reason) @staticmethod async def _get_extract_failure_reason(ex: Exception, data: Dict[str, Any]): \"\"\" Get the failure", "range of ticks (closing values and closing times) for a pair from the", "__init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session = session \"\"\" Object HTTP client session. 
\"\"\"", "KeyError: retry_reason = \"missing 'success' value\" retry = True if retry: attempt +=", "API response message (if present). Arguments: data: Dict of the parsed API response.", "else: reason = None retry = False return (retry, reason) @staticmethod async def", "None return (url, headers) async def call_json(self, method: str, params: list=None): \"\"\" Call", "= 'empty or missing results' return \"{} ({}: {})\".format(api_message, type(ex).__name__, ex) async def", "and headers for a given API method and parameter list. Forms the full", "!= 200 or results is None or results[0] is None: self.log.error(\"Failed getting market", "the market summaries from the Bittrex API. Returns: The market summaries dict. \"\"\"", "cached data for marketSummariesV1.\", verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data'] results, status = await self.call_extract([", "dict paths contains invalid syntax. \"\"\" retry = False attempt = 0 while", "Syntax errors in extract paths will not be retried. 
retry_fail: If True, will", "are correct to avoid repeating of non-idempotent operations (such as buying or selling)", "[pair, quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'sellLimit', params=params, log=True,", "Returns: (tuple): A tuple containing: float: The current close price, or None if", "formatted traceback describing the reason for retry or error, or None if no", "def cancel_order(self, pair: str, order_id: str): \"\"\" \"\"\" params = [order_id] results, status", "'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getTicker': { 'path': 'v1.1/public/getticker', 'params': 'market={}',", "(retry, reason) @staticmethod async def _get_extract_failure_reason(ex: Exception, data: Dict[str, Any]): \"\"\" Get the", "as e: retry_reason = '{}: {}'.format(type(e).__name__, e) retry = True if retry: attempt", "(tuple): A tuple containing: float: The current close price, or None if an", "missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True) if status == 200 and results", "results is not None and results[0] is not None: market_summaries = {} for", "== false (missing message)\" if not retry: results, ex = await self._extract_items(extract, data)", "all retries, or had syntax errors in extract paths will be set to", "def _get_extract_failure_reason(ex: Exception, data: Dict[str, Any]): \"\"\" Get the failure reason from the", "{ 'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'sellLimit': { 'path': 'v1.1/market/selllimit', 'params':", "not retry: try: data = json.loads(raw_data) _ = data['success'] return (data, status) except", "{ 'active': active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'],", "else: raise 
ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs'])) async def call(self, method: str, params: Sequence[Any]=None):", "dict containing the parsed JSON response. On a non-200 response, the raw response", "retry = False elif isinstance(ex, (SyntaxError, NameError)): reason = \"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__)))", "= { 'active': active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue': 0.0, 'baseVolume':", "<= 599 and status != 504) or (status in [0, 408, 429]): retry_reason", "Get v1 market summaries from the API, cached for the current tick interval.", "[\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name of the API method to call. params: Values of", "or None if a syntax or or extraction error occurred. Exception: The last", "of the API method to call. params: Values of query parameters to pass", "The raw HTTP response body (may be None). status (int): The HTTP response", "connection or transport failure. \"\"\" retry = False attempt = 0 status =", "data) retry, reason = await self._handle_extract_exception(ex, data, retry_data) if retry: attempt += 1", "eg. 'BTC-ETH'. length: Not supported by the API, will always return all ticks.", "for the request, or None if no headers are required. \"\"\" query =", "the dictionary paths of the response data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"]", "results is None or results[0] is None: self.log.error(\"Failed executing cancel order request: params", "API is kept current (unlike v2). \"\"\" await self.lock.acquire() if 'marketSummariesV1' in self.cache:", "least one element exists \"['result'][0]['T']\" ], 'getTicks', params=params, retry_data=True, retry_fail=True) if status !=", "return all ticks. 
Returns: A list of the raw tick data from the", "= result else: self.log.error(\"Failed getting v1 market summaries: status {}, results {}.\", status,", "False summaries[pair] = { 'active': active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue':", "fragment or formatted traceback describing the reason for retry or error, or None", "'v1.1/account/getbalance', 'params': 'currency={}', 'auth': True }, } class Client(api.Client): \"\"\" Client for interacting", "last = summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay'] if not prev_day: prev_day = last if", "= [base] results, status = await self.call_extract([ \"['result']['Available']\", ], 'getBalance', params=params, log=True, retry_data=True)", "summaries from the API, cached for the current tick interval. Converts the response", "return (data, status) @staticmethod async def _get_request_data(method: str, params: Sequence[Any]=None): \"\"\" Get the", "inactive due to pending removal.\", pair) active = False summaries[pair] = { 'active':", "None for tick in results[0]: close_datetime = datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return", "\"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To retry on any missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\",", "KeyError, SyntaxError, NameError) as e: ex = e results.append(None) return (results, ex) @staticmethod", "failure. \"\"\" retry = False attempt = 0 status = 0 data =", "(TypeError, IndexError, KeyError, SyntaxError, NameError) as e: ex = e results.append(None) return (results,", "the API JSON response. 
This is optional as some responses can be quite", "def sell_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\" params = [pair,", "be retried, false otherwise. Returns: (tuple): A tuple containing: (bool): True if the", "item. Any items that failed to be extracted after exhausting all retries, or", "except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as e: retry_reason = '{}: {}'.format(type(e).__name__, e) retry =", "'marketSummariesV1' in self.cache: if time.time() - self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning cached data for", "= await Client._get_extract_failure_reason(ex, data) retry = False else: reason = None retry =", "results.append(expr_func(data)) except (TypeError, IndexError, KeyError, SyntaxError, NameError) as e: ex = e results.append(None)", "(invalid JSON), or missing 'success' value. Arguments: method: Name of the API method", "HTTP response body (may be None). status (int): The HTTP response status code.", "params, status, results) return None for tick in results[0]: close_datetime = datetime.strptime(tick['T'], TIME_FORMAT)", "200 response, specifically empty response body, malformed response body (invalid JSON), or missing", "self.cache = { 'balance': {} } \"\"\" Response cache. \"\"\" self.tick_interval_str: str \"\"\"", "results[2], 'remaining': results[3], 'value': results[4], 'fees': results[5], } async def cancel_order(self, pair: str,", "be extracted after exhausting all retries, or had syntax errors in extract paths", "tuple containing the values for each extracted item. 
Any items that failed to", "{ 'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'cancelOrder': { 'path': 'v1.1/market/cancel', 'params':", "= last if notice: self.log.info(\"{} NOTICE: {}\", pair, notice) if 'will be removed'", "(url, headers) async def call_json(self, method: str, params: list=None): \"\"\" Call a Bittrex", "status, results) return None balance = results[0] self.cache['balance'][base] = { 'time': time.time(), 'data':", "status) except json.JSONDecodeError: retry_reason = 'invalid JSON response' except KeyError: retry_reason = \"missing", "summaries[pair] = { 'active': active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue': 0.0,", "None or results[0] is None: self.log.error(\"Failed getting market summaries: status {}, results {}.\",", "extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name of the API method to call. params:", "= close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async def get_tick_range(self, pair: str, start_time: float, end_time: float)", "API. \"\"\" def __init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session = session \"\"\" Object HTTP", "the raw tick data from the API, or None if an error occurred", "pair: str, start_time: float, end_time: float) -> List[Dict[str, Any]]: \"\"\" Get a range", "Client._get_extract_failure_reason(ex, data) retry = False else: reason = None retry = False return", "extract dict keys are correct to avoid repeating of non-idempotent operations (such as", "False else: break if reason is not None: self.log.error(\"Giving up on: {}\", reason)", "a currency pair from the API. Arguments: pair: Currency pair name eg. 
'BTC-ETH'", "executing buy order request: params {}, status {}, results {}.\", params, status, results)", "return None return results[0] async def get_order(self, pair: str, order_id: str): \"\"\" \"\"\"", "def buy_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\" params = [pair,", "= 0 data = None while attempt < config['http_max_retries']: raw_data, status = await", "self.call_extract([ \"['success']\" ], 'cancelOrder', params=params, log=True, retry_data=True) if status != 200 or results", "syntax. \"\"\" retry = False attempt = 0 while attempt <= config['api_max_retries']: data,", "summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay'] if not prev_day: prev_day = last if notice: self.log.info(\"{}", "and exponential backoff for invalid data items. Caution must be taken to ensure", "values and closing times) for a pair from the Bittrex API. \"\"\" raise", "\"\"\" self.log = utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object logger. \"\"\" self.lock = asyncio.Lock() \"\"\"", "False return (data, status) async def call_extract(self, extract: Sequence[str], method: str, params: Sequence[Any]=None,", "from the Bittrex API. Returns: The market summaries dict. \"\"\" results, status, =", "as response: status = response.status if status >= 200 and status <= 399:", "params=params, retry_data=True, retry_fail=True) if status != 200 or results is None or results[0]", "not be retried. retry_fail: If True, will perform backoff and retry on explicit", "(TypeError, IndexError, KeyError)): reason = await Client._get_extract_failure_reason(ex, data) if retry_data and data['success']: retry", "+= 1 await common.backoff(attempt, \"Bittrex call_json {}\".format(method), retry_reason) retry = False return (data,", "that occurred during extraction, or None if no exception occurred. 
\"\"\" ex =", "= True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_json {}\".format(method), retry_reason)", "float, end_time: float) -> List[Dict[str, Any]]: \"\"\" Get a range of ticks (closing", "(such as buying or selling) so should always be tested with retry=False (the", "the extraction attempt. \"\"\" if 'message' in data and data['message'] and data['message'] !=", "'fiveMin' else: raise ValueError(\"Unsupported tick interval: {}\".format(config['tick_interval_secs'])) async def call(self, method: str, params:", "(raw_data, status) if raw_data is None: retry_reason = \"'None' on successful response\" retry", "}, 'buyLimit': { 'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'sellLimit': { 'path':", "the given extraction exception and API response message (if present). Arguments: data: Dict", "'params': 'uuid={}', 'auth': True }, 'getOrder': { 'path': 'v1.1/account/getorder', 'params': 'uuid={}', 'auth': True", "\"\"\" Lock used for syncing access to API data. \"\"\" self.cache = {", "backoff for HTTP level error conditions. Arguments: method: Name of the API method", "NameError) as e: ex = e results.append(None) return (results, ex) @staticmethod async def", "from the API, or None if an error occurred or no ticks are", "and parse JSON response. Implements retry and exponential backoff for higher-level API error", "import datetime, timezone from typing import Any, Dict, List, Sequence, Tuple import api", "Client._get_extract_failure_reason(ex, data) if retry_data and data['success']: retry = True else: retry = False", "be None). status (int): The HTTP response status code. 
A value of 0", "headers = await self._get_request_data(method, params) while attempt < config['http_max_retries']: try: async with self.session.get(url,", "signature = hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers = {'apisign': signature} else: url = API_URL.format(API_METHODS[method]['path'],", "break if (status >= 500 and status <= 599 and status != 504)", "api_message = 'empty or missing results' return \"{} ({}: {})\".format(api_message, type(ex).__name__, ex) async", "break if reason is not None: self.log.error(\"Giving up on: {}\", reason) return (tuple(results),", "0 status = 0 data = None url, headers = await self._get_request_data(method, params)", "values for each extracted item. Any items that failed to be extracted after", "in notice or 'scheduled for delisting' in notice: self.log.info(\"{} marked as inactive due", "not results[0]: self.log.error(\"Failed getting order: params{}, status {}, results {}.\", params, status, results)", "calculates any needed HMAC signature to be passed in headers. Arguments: method: Name", "to the method. Returns: (tuple): A tuple containing: (str): Full URL for the", "status <= 399: data = await response.text() break if (status >= 500 and", "{} } \"\"\" Response cache. \"\"\" self.tick_interval_str: str \"\"\" String representation of the", "'auth': False }, 'buyLimit': { 'path': 'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'sellLimit':", "HTTP level error conditions. Arguments: method: Name of the API method to call.", "for a pair from the Bittrex API. \"\"\" raise NotImplementedError(\"Tick range not supported", "else: self.log.error('Got non-retryable status {}.', status) data = await response.text() break except (aiohttp.ClientConnectionError,", "hour volume, or None if an error occurred. 
\"\"\" market_summaries = await self._get_market_summaries_v1()", "= False return (data, status) @staticmethod async def _get_request_data(method: str, params: Sequence[Any]=None): \"\"\"", "or data is None: self.log.error(\"Failed on API method '{}({})': status {}, data {}\",", "None retry = False return (retry, reason) @staticmethod async def _get_extract_failure_reason(ex: Exception, data:", "status) if log: self.log.debug(\"API method '{}({})' response:\\n{}\", method, params, json.dumps(data, indent=2)) if not", "\"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True, retry_fail=True) if status != 200 or results is", "'params': '', 'auth': False }, 'getTicks': { 'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth': False", "raw HTTP response body (may be None). status (int): The HTTP response status", "access to API data. \"\"\" self.cache = { 'balance': {} } \"\"\" Response", "active = summary['Market']['IsActive'] notice = summary['Market']['Notice'] last = summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay'] if", "await self._get_market_summaries_v1() if market_summaries is None: return None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def", "to None. On a non-200 response, the raw response body (may be None).", "self.cache['balance'][base] = { 'time': time.time(), 'data': balance } return balance async def _get_market_summaries_v1(self):", "\"\"\" retry = False attempt = 0 status = 0 data = None", "eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary of data to extract items from. Returns: (tuple):", "\"\"\" params = [order_id] results, status = await self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\",", "A value of 0 indicates a connection or transport failure. 
Raises: SyntaxError, NameError:", "start_time: float, end_time: float) -> List[Dict[str, Any]]: \"\"\" Get a range of ticks", "log=True, retry_data=True) if status != 200 or results is None or not results[0]:", "common.backoff(attempt, \"Bittrex call_json {}\".format(method), retry_reason) retry = False return (data, status) async def", "pass to the method. retry_data: If True, will perform backoff and retry on", "= await self.call_extract([ \"['result']\", \"['result'][0]['Last']\", # For retry of missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\",", "Arguments: extract: A list of strings representing the dictionary paths of the response", "Exception thrown as a result of the extraction attempt. \"\"\" if 'message' in", "results is None or results[0] is None: self.log.error(\"Failed executing buy order request: params", "200 and results is not None and results[0] is not None: market_summaries =", "Exception returned from :meth:`_extract_items`. data: Dictionary of data passed to :meth:`_extract_items`. retry_data: True", "and results is not None and results[0] is not None: market_summaries = {}", "{}.\", params, status, results) return None return results[0] async def get_balance(self, base: str):", "= data['success'] return (data, status) except json.JSONDecodeError: retry_reason = 'invalid JSON response' except", "ex: Exception returned from :meth:`_extract_items`. data: Dictionary of data passed to :meth:`_extract_items`. retry_data:", "\"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder', params=params, log=True, retry_data=True) if status != 200 or", "cancel_order(self, pair: str, order_id: str): \"\"\" \"\"\" params = [order_id] results, status =", "call. params: Values of query parameters to pass to the method. retry_data: If", "or 'scheduled for delisting' in notice: self.log.info(\"{} marked as inactive due to pending", "or error, or None if no issue occurred. 
\"\"\" if isinstance(ex, (TypeError, IndexError,", "getting order: params{}, status {}, results {}.\", params, status, results) return None return", "Any]): \"\"\" Extract items from a dictionary of data. Arguments: extract: List of", "data['message'] != '': api_message = data['message'] else: api_message = 'empty or missing results'", "message)\" if not retry: results, ex = await self._extract_items(extract, data) retry, reason =", "extract data items from its JSON response. Implements retry and exponential backoff for", "'cancelOrder': { 'path': 'v1.1/market/cancel', 'params': 'uuid={}', 'auth': True }, 'getOrder': { 'path': 'v1.1/account/getorder',", "self.call_extract([ \"['result']['uuid']\", ], 'sellLimit', params=params, log=True, retry_data=True) if status != 200 or results", "explicit failure response from the API. log: If True, will log the API", "v1 market summaries: status {}, results {}.\", status, results) if 'marketSummariesV1' in self.cache:", "Name of the API method to call. params: Values of query parameters to", "1 await common.backoff(attempt, \"Bittrex call {}\".format(method), retry_reason) retry = False return (data, status)", "tick in results[0]: close_datetime = datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async", "status, = await self.call_extract([ \"['result']\", \"['result'][0]['C']\", # To retry if not at least", "or no ticks are available. \"\"\" params = [pair, self.tick_interval_str] results, status, =", "by the API, will always return all ticks. Returns: A list of the", "error conditions. Arguments: method: Name of the API method to call. 
params: Values", "value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'buyLimit', params=params, log=True) if status", "status = 0 data = None url, headers = await self._get_request_data(method, params) while", "extract: Sequence[str], method: str, params: Sequence[Any]=None, retry_data=False, retry_fail=False, log=False): \"\"\" Call a Bittrex", "a Bittrex API method and extract data items from its JSON response. Implements", "class Client(api.Client): \"\"\" Client for interacting with the Bittrex API. \"\"\" def __init__(self,", "no ticks are available. \"\"\" params = [pair, self.tick_interval_str] results, status, = await", "async def _get_request_data(method: str, params: Sequence[Any]=None): \"\"\" Get the request URL and headers", "with the Bittrex API. \"\"\" def __init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session = session", "syncing access to API data. \"\"\" self.cache = { 'balance': {} } \"\"\"", "To retry on any missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\",", "await common.backoff(attempt, \"Bittrex call_json {}\".format(method), retry_reason) retry = False return (data, status) async", "\"Bittrex call {}\".format(method), retry_reason) retry = False return (data, status) @staticmethod async def", "'getTicks': { 'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick',", "If True, will perform backoff and retry on empty or missing data items.", "!= 200 or data is None: self.log.error(\"Failed on API method '{}({})': status {},", "'uuid={}', 'auth': True }, 'getOrder': { 'path': 'v1.1/account/getorder', 'params': 'uuid={}', 'auth': 
True },", "status (int): The HTTP response status code. A value of 0 indicates a", "optional as some responses can be quite large. Returns: (tuple): A tuple containing:", "self.log.error(\"Failed getting v1 market summaries: status {}, results {}.\", status, results) if 'marketSummariesV1'", "message)\" except KeyError: reason = \"success == false (missing message)\" if not retry:", "failure reason from the given extraction exception and API response message (if present).", "], 'getTicks', params=params, retry_data=True, retry_fail=True) if status != 200 or results is None", "some responses can be quite large. Returns: (tuple): A tuple containing: data (object):", "or more of the passed extract dict paths contains invalid syntax. \"\"\" retry", "produced from an extract operation. Arguments: ex: Exception returned from :meth:`_extract_items`. data: Dictionary", "= config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key, nonce) + query url = API_URL.format(API_METHODS[method]['path'], query) signature", "# To retry if not at least one element exists \"['result'][0]['T']\" ], 'getTicks',", "and closing times) for a pair from the Bittrex API. \"\"\" raise NotImplementedError(\"Tick", "market_summaries = await self._get_market_summaries_v1() if market_summaries is None: return None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume'])", "e: retry_reason = '{}: {}'.format(type(e).__name__, e) retry = True if retry: attempt +=", "query parameters to pass to the method. retry_data: If True, will perform backoff", "balance async def _get_market_summaries_v1(self): \"\"\" Get v1 market summaries from the API, cached", "self.session = session \"\"\" Object HTTP client session. \"\"\" self.log = utils.logging.ChildLogger(parent=log, scope=self)", "API_METHODS[method]['params'].format(*params or []) if API_METHODS[method]['auth']: nonce = int(time.time() * 1000) api_key = config['bittrex_api_key']", "pair from the API. 
Arguments: pair: Currency pair name eg. 'BTC-ETH' Returns: (tuple):", "else: url = API_URL.format(API_METHODS[method]['path'], query) headers = None return (url, headers) async def", "\"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry = False elif ex is not None: reason", "in self.cache: self.cache['marketSummariesV1']['time'] = time.time() self.lock.release() return self.cache['marketSummariesV1']['data'] else: self.lock.release() return None self.cache['marketSummariesV1']", "\"\"\" params = [pair, quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\", ],", "of ticks (closing values and closing times) for a pair from the Bittrex", "'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'cancelOrder': { 'path': 'v1.1/market/cancel', 'params': 'uuid={}',", "retry_fail: retry = True try: reason = data['message'] if data['message'] != '' else", "data['success'] return (data, status) except json.JSONDecodeError: retry_reason = 'invalid JSON response' except KeyError:", "json.loads(raw_data) _ = data['success'] return (data, status) except json.JSONDecodeError: retry_reason = 'invalid JSON", "json.JSONDecodeError: retry_reason = 'invalid JSON response' except KeyError: retry_reason = \"missing 'success' value\"", "from the Bittrex API. Arguments: pair: The currency pair eg. 'BTC-ETH'. length: Not", "str, start_time: float, end_time: float) -> List[Dict[str, Any]]: \"\"\" Get a range of", "response body (may be None). On a response with a missing response body,", "session. \"\"\" self.log = utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object logger. 
\"\"\" self.lock = asyncio.Lock()", "'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last': last, } return summaries async def get_ticks(self, pair:", "status {}, results {}.\", params, status, results) return None for tick in results[0]:", "str, params: Sequence[Any]=None): \"\"\" Get the request URL and headers for a given", "Call a Bittrex API method. Implements retry and exponentional backoff for HTTP level", "method: str, params: Sequence[Any]=None, retry_data=False, retry_fail=False, log=False): \"\"\" Call a Bittrex API method", "(may be None). On a response with a missing response body, None. status", "params = [pair, quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'buyLimit',", "time.time() - self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning cached data for marketSummariesV1.\", verbosity=1) self.lock.release() return", "'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth':", "response data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] method: Name of the API", "'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getTicker': { 'path': 'v1.1/public/getticker', 'params': 'market={}', 'auth': False", "params) if status != 200: return (raw_data, status) if raw_data is None: retry_reason", "the method. retry_data: If True, will perform backoff and retry on empty or", "current tick interval. 
Converts the response list to a dict for faster lookups.", "params=params, log=True, retry_data=True) if status != 200 or results is None or results[0]", "None: self.log.error(\"Failed executing sell order request: params {}, status {}, results {}.\", params,", "the Bittrex API.\") async def get_last_values(self, pair: str) -> Tuple[float, float]: \"\"\" Get", "is None: self.log.error(\"Failed getting market summaries: status {}, results {}.\", status, results) return", "def call_json(self, method: str, params: list=None): \"\"\" Call a Bittrex API method and", "and closing times) for a pair from the Bittrex API. Arguments: pair: The", "retry_fail: If True, will perform backoff and retry on explicit failure response from", "client session. \"\"\" self.log = utils.logging.ChildLogger(parent=log, scope=self) \"\"\" Object logger. \"\"\" self.lock =", "missing response body, None. status (int): The HTTP response status code. A value", "Dict[str, Any]): \"\"\" Extract items from a dictionary of data. Arguments: extract: List", "syntax or or extraction error occurred. Exception: The last exception that occurred during", "retry or error, or None if no issue occurred. \"\"\" if isinstance(ex, (TypeError,", "the Bittrex API. Arguments: pair: The currency pair eg. 'BTC-ETH'. length: Not supported", "exception and API response message (if present). Arguments: data: Dict of the parsed", "or results is None or not results[0]: self.log.error(\"Failed getting order: params{}, status {},", "to pass to the method. Returns: (tuple): A tuple containing: data (object): On", "not None and results[0] is not None: market_summaries = {} for result in", "if no headers are required. 
\"\"\" query = API_METHODS[method]['params'].format(*params or []) if API_METHODS[method]['auth']:", "market_summaries is None: return None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def buy_limit(self, pair: str,", "await response.text() break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as e: retry_reason = '{}: {}'.format(type(e).__name__,", "\"success == false (missing message)\" if not retry: results, ex = await self._extract_items(extract,", "IndexError, KeyError, SyntaxError, NameError) as e: ex = e results.append(None) return (results, ex)", "self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning cached data for marketSummariesV1.\", verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data'] results,", "'getTicker': { 'path': 'v1.1/public/getticker', 'params': 'market={}', 'auth': False }, 'buyLimit': { 'path': 'v1.1/market/buylimit',", "params) if status != 200 or data is None: self.log.error(\"Failed on API method", "reason = await Client._get_extract_failure_reason(ex, data) if retry_data and data['success']: retry = True else:", "API, will always return all ticks. Returns: A list of the raw tick", "params: Sequence[Any]=None, retry_data=False, retry_fail=False, log=False): \"\"\" Call a Bittrex API method and extract", "be tested with retry=False (the default) first. Arguments: extract: A list of strings", "{}, data {}\", method, params, status, data) return (data, status) if log: self.log.debug(\"API", "if a syntax or or extraction error occurred. 
Exception: The last exception that", "= True else: retry = False elif isinstance(ex, (SyntaxError, NameError)): reason = \"{}:", "= False else: break if reason is not None: self.log.error(\"Giving up on: {}\",", "= False return (data, status) async def call_extract(self, extract: Sequence[str], method: str, params:", "results) if 'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time'] = time.time() self.lock.release() return self.cache['marketSummariesV1']['data'] else: self.lock.release()", "failure response from the API. log: If True, will log the API JSON", "timezone from typing import Any, Dict, List, Sequence, Tuple import api import utils", "Sequence[Any]=None, retry_data=False, retry_fail=False, log=False): \"\"\" Call a Bittrex API method and extract data", "a pair from the Bittrex API. \"\"\" raise NotImplementedError(\"Tick range not supported by", "None or results[0] is None: self.log.error(\"Failed executing cancel order request: params {} status", "results[0] is None: self.log.error(\"Failed executing buy order request: params {}, status {}, results", "return results[0] async def get_balance(self, base: str): \"\"\" \"\"\" params = [base] results,", "A tuple containing: data (str): The raw HTTP response body (may be None).", "data: Dict[str, Any]): \"\"\" Get the failure reason from the given extraction exception", "parameters to pass to the method. Returns: (tuple): A tuple containing: (str): Full", "On a 200 response with a missing response body, None. status (int): The", "of headers for the request, or None if no headers are required. \"\"\"", "buying or selling) so should always be tested with retry=False (the default) first.", "result in results[0]: market_summaries[result['MarketName']] = result else: self.log.error(\"Failed getting v1 market summaries: status", "passed to :meth:`_extract_items`. 
retry_data: True if missing data should be retried, false otherwise.", "'empty or missing results' return \"{} ({}: {})\".format(api_message, type(ex).__name__, ex) async def get_market_summaries(self)", "attempt = 0 while attempt <= config['api_max_retries']: data, status = await self.call_json(method, params)", "e results.append(None) return (results, ex) @staticmethod async def _handle_extract_exception(ex: Exception, data: Dict[str, Any],", "results, ex = await self._extract_items(extract, data) retry, reason = await self._handle_extract_exception(ex, data, retry_data)", "API data. \"\"\" self.cache = { 'balance': {} } \"\"\" Response cache. \"\"\"", "True if not retry: try: data = json.loads(raw_data) _ = data['success'] return (data,", "import api import utils import common import configuration import aiohttp config = configuration.config", "Currency pair name eg. 'BTC-ETH' Returns: (tuple): A tuple containing: float: The current", "transport failure. \"\"\" retry = False attempt = 0 status = 0 data", "response body (may be None). status (int): The HTTP response status code. A", "= await self.call_extract([ \"['result']['uuid']\", ], 'sellLimit', params=params, log=True, retry_data=True) if status != 200", "self.cache: self.cache['marketSummariesV1']['time'] = time.time() self.lock.release() return self.cache['marketSummariesV1']['data'] else: self.lock.release() return None self.cache['marketSummariesV1'] =", "\"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder', params=params, log=True, retry_data=True) if status", "url, headers = await self._get_request_data(method, params) while attempt < config['http_max_retries']: try: async with", "'params': 'uuid={}', 'auth': True }, 'getBalance': { 'path': 'v1.1/account/getbalance', 'params': 'currency={}', 'auth': True", "backoff and retry on empty or missing data items. 
Syntax errors in extract", "of strings representing the dictionary paths of the response data items to extract,", "required. \"\"\" query = API_METHODS[method]['params'].format(*params or []) if API_METHODS[method]['auth']: nonce = int(time.time() *", "retry = False else: break if reason is not None: self.log.error(\"Giving up on:", "}, 'cancelOrder': { 'path': 'v1.1/market/cancel', 'params': 'uuid={}', 'auth': True }, 'getOrder': { 'path':", "or transport failure. \"\"\" retry = False attempt = 0 status = 0", "get_balance(self, base: str): \"\"\" \"\"\" params = [base] results, status = await self.call_extract([", "None balance = results[0] self.cache['balance'][base] = { 'time': time.time(), 'data': balance } return", "\"['result'][0]['T']\"] data: Dictionary of data to extract items from. Returns: (tuple): A tuple", "or or extraction error occurred. Exception: The last exception that occurred during extraction,", "'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth': False }, 'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries', 'params': '',", "extracted item. Any items that failed to be extracted after exhausting all retries,", "if the exception warrants a retry, False if no error or and unretryable", "'cancelOrder', params=params, log=True, retry_data=True) if status != 200 or results is None or", "or missing data items. Syntax errors in extract paths will not be retried.", "market summaries: status {}, results {}.\", status, results) if 'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time']", "signature to be passed in headers. Arguments: method: Name of the API method", "and unretryable error occurred. (str): Sentence fragment or formatted traceback describing the reason", "None). On a 200 response with a missing response body, None. 
status (int):", "or not results[0]: self.log.error(\"Failed getting order: params{}, status {}, results {}.\", params, status,", "'v1.1/public/getMarketSummaries', 'params': '', 'auth': False }, 'getTicks': { 'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth':", "[0, 408, 429]): retry_reason = 'status {}'.format(status) retry = True else: self.log.error('Got non-retryable", "so should always be tested with retry=False (the default) first. Arguments: extract: A", "'path': 'v1.1/account/getbalance', 'params': 'currency={}', 'auth': True }, } class Client(api.Client): \"\"\" Client for", "currency pair eg. 'BTC-ETH'. length: Not supported by the API, will always return", "(may be None). status (int): The HTTP response status code. A value of", "false (missing message)\" if not retry: results, ex = await self._extract_items(extract, data) retry,", "399: data = await response.text() break if (status >= 500 and status <=", "[\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary of data to extract items from. Returns: (tuple): A", "query url = API_URL.format(API_METHODS[method]['path'], query) signature = hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers = {'apisign':", "successful response\" retry = True if not retry: try: data = json.loads(raw_data) _", "up on: {}\", reason) return (tuple(results), status) @staticmethod async def _extract_items(extract: Sequence[str], data:", "0 data = None while attempt < config['http_max_retries']: raw_data, status = await self.call(method,", "], 'getOrder', params=params, log=True, retry_data=True) if status != 200 or results is None", "= None return (url, headers) async def call_json(self, method: str, params: list=None): \"\"\"", "None if no headers are required. 
\"\"\" query = API_METHODS[method]['params'].format(*params or []) if", "_handle_extract_exception(ex: Exception, data: Dict[str, Any], retry_data: bool): \"\"\" Handle any exception produced from", "None: market_summaries = {} for result in results[0]: market_summaries[result['MarketName']] = result else: self.log.error(\"Failed", "URL with query string and calculates any needed HMAC signature to be passed", "or selling) so should always be tested with retry=False (the default) first. Arguments:", "will be set to None. On a non-200 response, the raw response body", "active = False summaries[pair] = { 'active': active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize':", "pair: str, quantity: float, value: float): \"\"\" \"\"\" params = [pair, quantity, value]", "= False else: reason = None retry = False return (retry, reason) @staticmethod", "with retry=False (the default) first. Arguments: extract: A list of strings representing the", "\"Bittrex call_json {}\".format(method), retry_reason) retry = False return (data, status) async def call_extract(self,", "* 1000) api_key = config['bittrex_api_key'] api_secret = config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key, nonce) +", "retry, reason = await self._handle_extract_exception(ex, data, retry_data) if retry: attempt += 1 await", "call. params: Values of query parameters to pass to the method. Returns: (tuple):", "Bittrex API. Returns: The market summaries dict. \"\"\" results, status, = await self.call_extract([", "results[1], 'quantity': results[2], 'remaining': results[3], 'value': results[4], 'fees': results[5], } async def cancel_order(self,", "retry_reason = '{}: {}'.format(type(e).__name__, e) retry = True if retry: attempt += 1", "status != 200: return (raw_data, status) if raw_data is None: retry_reason = \"'None'", "for each extracted item. 
Any items that failed to be extracted after exhausting", "at least one element exists \"['result'][0]['T']\" ], 'getTicks', params=params, retry_data=True, retry_fail=True) if status", "list of strings representing the dictionary paths of the response data items to", "'currency={}', 'auth': True }, } class Client(api.Client): \"\"\" Client for interacting with the", "\"['result']['uuid']\", ], 'buyLimit', params=params, log=True) if status != 200 or results is None", "retry_reason) retry = False return (data, status) @staticmethod async def _get_request_data(method: str, params:", "60: self.tick_interval_str = 'oneMin' elif config['tick_interval_secs'] == 300: self.tick_interval_str = 'fiveMin' else: raise", "summary['Market']['Notice'] last = summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay'] if not prev_day: prev_day = last", "results {}.\", params, status, results) return None return results[0] async def get_balance(self, base:", "= API_URL.format(API_METHODS[method]['path'], query) headers = None return (url, headers) async def call_json(self, method:", "retry of missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True) if status == 200", "paths contains invalid syntax. \"\"\" retry = False attempt = 0 while attempt", "pair eg. 'BTC-ETH'. length: Not supported by the API, will always return all", "a non-200 response, the raw response body (may be None). 
On a 200", "ex, ''.join(traceback.format_tb(ex.__traceback__))) retry = False elif ex is not None: reason = await", "\"\"\" if config['tick_interval_secs'] == 60: self.tick_interval_str = 'oneMin' elif config['tick_interval_secs'] == 300: self.tick_interval_str", "the exception warrants a retry, False if no error or and unretryable error", "await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To retry on any missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\",", "\"['result']\", \"['result'][0]['Last']\", # For retry of missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True)", "kept current (unlike v2). \"\"\" await self.lock.acquire() if 'marketSummariesV1' in self.cache: if time.time()", "exception produced from an extract operation. Arguments: ex: Exception returned from :meth:`_extract_items`. data:", "\"\"\" Get a range of ticks (closing values and closing times) for a", "(int): The HTTP response status code. A value of 0 indicates a connection", "json.dumps(data, indent=2)) if not data['success'] and retry_fail: retry = True try: reason =", "else \"success == false (blank message)\" except KeyError: reason = \"success == false", "getting v1 market summaries: status {}, results {}.\", status, results) if 'marketSummariesV1' in", "data (str): The raw HTTP response body (may be None). status (int): The", "response with a missing response body, None. status (int): The HTTP response status", "issue occurred. 
\"\"\" if isinstance(ex, (TypeError, IndexError, KeyError)): reason = await Client._get_extract_failure_reason(ex, data)", "599 and status != 504) or (status in [0, 408, 429]): retry_reason =", "None return results[0] async def sell_limit(self, pair: str, quantity: float, value: float): \"\"\"", "status, results) return None return results[0] async def get_balance(self, base: str): \"\"\" \"\"\"", "notice) if 'will be removed' in notice or 'will be delisted' in notice", "if not prev_day: prev_day = last if notice: self.log.info(\"{} NOTICE: {}\", pair, notice)", "data is None: self.log.error(\"Failed on API method '{}({})': status {}, data {}\", method,", "body (may be None). status (int): The HTTP response status code. A value", "tick data from the API, or None if an error occurred or no", "method. Returns: (tuple): A tuple containing: (str): Full URL for the request. (dict):", "Get ticks (closing values and closing times) for a pair from the Bittrex", "API. log: If True, will log the API JSON response. This is optional", "response from the API. log: If True, will log the API JSON response.", "summaries from the Bittrex API. Returns: The market summaries dict. \"\"\" results, status,", "an error occurred. \"\"\" market_summaries = await self._get_market_summaries_v1() if market_summaries is None: return", "if 'marketSummariesV1' in self.cache: if time.time() - self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning cached data", "\"\"\" __author__ = '<NAME> <$(echo nqnz.enshfr#tznvy.pbz | tr a-z# n-za-m@)>' __version__ = \"0.2.0\"", "\"\"\" self.cache = { 'balance': {} } \"\"\" Response cache. \"\"\" self.tick_interval_str: str", "error, or None if no issue occurred. 
\"\"\" if isinstance(ex, (TypeError, IndexError, KeyError)):", "params, status, results) return None return results[0] async def get_balance(self, base: str): \"\"\"", "prev_day = last if notice: self.log.info(\"{} NOTICE: {}\", pair, notice) if 'will be", "summaries: status {}, results {}.\", status, results) return None summaries = {} for", "= False elif ex is not None: reason = await Client._get_extract_failure_reason(ex, data) retry", "'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'cancelOrder': { 'path': 'v1.1/market/cancel', 'params': 'uuid={}', 'auth':", "!= 200 or results is None or results[0] is None: self.log.error(\"Failed getting ticks:", "False }, 'getTicks': { 'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getLatestTick': {", "attempt = 0 status = 0 data = None url, headers = await", "a dictionary of data. Arguments: extract: List of strings representing the dictionary paths", "or results[0] is None: self.log.error(\"Failed executing cancel order request: params {} status {},", "[order_id] results, status = await self.call_extract([ \"['success']\" ], 'cancelOrder', params=params, log=True, retry_data=True) if", "\"success == false (blank message)\" except KeyError: reason = \"success == false (missing", "'will be removed' in notice or 'will be delisted' in notice or 'scheduled", "}, 'getTicks': { 'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getLatestTick': { 'path':", "Call a Bittrex API method and extract data items from its JSON response.", "selling) so should always be tested with retry=False (the default) first. Arguments: extract:", "for syncing access to API data. \"\"\" self.cache = { 'balance': {} }", "else: break if reason is not None: self.log.error(\"Giving up on: {}\", reason) return", "method and parse JSON response. 
Implements retry and exponential backoff for higher-level API", "url = API_URL.format(API_METHODS[method]['path'], query) signature = hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers = {'apisign': signature}", "headers for the request, or None if no headers are required. \"\"\" query", "item expr_func = eval(expr) # pylint: disable=W0123 results.append(expr_func(data)) except (TypeError, IndexError, KeyError, SyntaxError,", "data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data: Dictionary of data to extract", "in self.cache: if time.time() - self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning cached data for marketSummariesV1.\",", "None and results[0] is not None: market_summaries = {} for result in results[0]:", "retry = False return (data, status) @staticmethod async def _get_request_data(method: str, params: Sequence[Any]=None):", "JSON response. On a non-200 response, the raw response body (may be None).", "if status != 200: return (raw_data, status) if raw_data is None: retry_reason =", "if log: self.log.debug(\"API method '{}({})' response:\\n{}\", method, params, json.dumps(data, indent=2)) if not data['success']", "{ 'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick', 'params':", "any missing fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True,", "async def cancel_order(self, pair: str, order_id: str): \"\"\" \"\"\" params = [order_id] results,", "self.lock = asyncio.Lock() \"\"\" Lock used for syncing access to API data. \"\"\"", "the current tick interval. 
Converts the response list to a dict for faster", "the method. Returns: (tuple): A tuple containing: (str): Full URL for the request.", "None or not results[0]: self.log.error(\"Failed getting order: params{}, status {}, results {}.\", params,", "request, or None if no headers are required. \"\"\" query = API_METHODS[method]['params'].format(*params or", "query) headers = None return (url, headers) async def call_json(self, method: str, params:", "!= 504) or (status in [0, 408, 429]): retry_reason = 'status {}'.format(status) retry", "body, malformed response body (invalid JSON), or missing 'success' value. Arguments: method: Name", "retry_fail=False, log=False): \"\"\" Call a Bittrex API method and extract data items from", "headers = {'apisign': signature} else: url = API_URL.format(API_METHODS[method]['path'], query) headers = None return", "data. \"\"\" self.cache = { 'balance': {} } \"\"\" Response cache. \"\"\" self.tick_interval_str:", "async def call(self, method: str, params: Sequence[Any]=None): \"\"\" Call a Bittrex API method.", "a pair from the Bittrex API. Arguments: pair: The currency pair eg. 'BTC-ETH'.", "results is None or results[0] is None: self.log.error(\"Failed getting balance: params {}, status", "a Bittrex API method and parse JSON response. Implements retry and exponential backoff", "False attempt = 0 status = 0 data = None while attempt <", "[base] results, status = await self.call_extract([ \"['result']['Available']\", ], 'getBalance', params=params, log=True, retry_data=True) if", "await common.backoff(attempt, \"Bittrex call_extract {}\".format(method), reason) retry = False else: break if reason", "a result of the extraction attempt. 
\"\"\" if 'message' in data and data['message']", "'{}({})': status {}, data {}\", method, params, status, data) return (data, status) if", "def __init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session = session \"\"\" Object HTTP client session.", "syntax errors in extract paths will be set to None. On a non-200", "\"\"\" Call a Bittrex API method and parse JSON response. Implements retry and", "with self.session.get(url, headers=headers) as response: status = response.status if status >= 200 and", "'auth': True }, 'getBalance': { 'path': 'v1.1/account/getbalance', 'params': 'currency={}', 'auth': True }, }", "value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'sellLimit', params=params, log=True, retry_data=True) if", "'v1.1/market/buylimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'sellLimit': { 'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth':", "is None or results[0] is None: self.log.error(\"Failed getting ticks: params {}, status {},", "API. \"\"\" raise NotImplementedError(\"Tick range not supported by the Bittrex API.\") async def", "if config['tick_interval_secs'] == 60: self.tick_interval_str = 'oneMin' elif config['tick_interval_secs'] == 300: self.tick_interval_str =", "'auth': True }, 'sellLimit': { 'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'cancelOrder':", "(tuple): A tuple containing: (str): Full URL for the request. 
(dict): Dictionary of", "quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'buyLimit', params=params, log=True) if", "async def _handle_extract_exception(ex: Exception, data: Dict[str, Any], retry_data: bool): \"\"\" Handle any exception", "results[3], 'value': results[4], 'fees': results[5], } async def cancel_order(self, pair: str, order_id: str):", "get_market_summaries(self) -> List[Dict[str, Any]]: \"\"\" Get the market summaries from the Bittrex API.", "\"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True) if status == 200 and results is not", "Implements retry and exponentional backoff for HTTP level error conditions. Arguments: method: Name", "\"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1', retry_data=True) if status == 200 and results is not None", "in notice: self.log.info(\"{} marked as inactive due to pending removal.\", pair) active =", "(status in [0, 408, 429]): retry_reason = 'status {}'.format(status) retry = True else:", "retry_reason) retry = False return (data, status) async def call_extract(self, extract: Sequence[str], method:", "or None if no exception occurred. \"\"\" ex = None results = []", "retry_data: bool): \"\"\" Handle any exception produced from an extract operation. Arguments: ex:", "'prevDay': prev_day, 'last': last, } return summaries async def get_ticks(self, pair: str, length:", "raw tick data from the API, or None if an error occurred or", "'getBalance': { 'path': 'v1.1/account/getbalance', 'params': 'currency={}', 'auth': True }, } class Client(api.Client): \"\"\"", "200 or results is None or results[0] is None: self.log.error(\"Failed executing buy order", "request URL and headers for a given API method and parameter list. Forms", "paths will not be retried. retry_fail: If True, will perform backoff and retry", "from :meth:`_extract_items`. data: Dictionary of data passed to :meth:`_extract_items`. 
retry_data: True if missing", "results[0] async def sell_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\" params", "return (tuple(results), status) @staticmethod async def _extract_items(extract: Sequence[str], data: Dict[str, Any]): \"\"\" Extract", "None summaries = {} for summary in results[0]: pair = summary['Summary']['MarketName'] active =", "'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'sellLimit': { 'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True", "JSON response. This is optional as some responses can be quite large. Returns:", "range not supported by the Bittrex API.\") async def get_last_values(self, pair: str) ->", "tested with retry=False (the default) first. Arguments: extract: A list of strings representing", "Returns: A list of the raw tick data from the API, or None", "is None or results[0] is None: self.log.error(\"Failed getting market summaries: status {}, results", "200 or results is None or not results[0]: self.log.error(\"Failed getting order: params{}, status", "retry_data) if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_extract {}\".format(method), reason) retry", "be passed in headers. Arguments: method: Name of the API method to call.", "a response with a missing response body, None. status (int): The HTTP response", "false otherwise. Returns: (tuple): A tuple containing: (bool): True if the exception warrants", "{}\", method, params, status, data) return (data, status) if log: self.log.debug(\"API method '{}({})'", "exists \"['result'][0]['T']\" ], 'getTicks', params=params, retry_data=True, retry_fail=True) if status != 200 or results", "is not None: self.log.error(\"Giving up on: {}\", reason) return (tuple(results), status) @staticmethod async", "status, results) return None for tick in results[0]: close_datetime = datetime.strptime(tick['T'], TIME_FORMAT) tick['T']", "of query parameters to pass to the method. 
Returns: (tuple): A tuple containing:", "notice: self.log.info(\"{} NOTICE: {}\", pair, notice) if 'will be removed' in notice or", "is None: return None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def buy_limit(self, pair: str, quantity:", "self._get_request_data(method, params) while attempt < config['http_max_retries']: try: async with self.session.get(url, headers=headers) as response:", "executing cancel order request: params {} status {}, results {}.\", params, status, results)", "On a normal 200 response, a tuple containing the values for each extracted", "containing: (bool): True if the exception warrants a retry, False if no error", "and data['success']: retry = True else: retry = False elif isinstance(ex, (SyntaxError, NameError)):", "= 0 data = None url, headers = await self._get_request_data(method, params) while attempt", "True }, 'cancelOrder': { 'path': 'v1.1/market/cancel', 'params': 'uuid={}', 'auth': True }, 'getOrder': {", "will not be retried. retry_fail: If True, will perform backoff and retry on", "expr_func = eval(expr) # pylint: disable=W0123 results.append(expr_func(data)) except (TypeError, IndexError, KeyError, SyntaxError, NameError)", "or 'will be delisted' in notice or 'scheduled for delisting' in notice: self.log.info(\"{}", "summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last': last, } return summaries async def get_ticks(self, pair: str,", "200 or results is None or results[0] is None: self.log.error(\"Failed executing cancel order", "IndexError, KeyError)): reason = await Client._get_extract_failure_reason(ex, data) if retry_data and data['success']: retry =", "data) if retry_data and data['success']: retry = True else: retry = False elif", "passed extract dict paths contains invalid syntax. 
\"\"\" retry = False attempt =", "self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder', params=params, log=True, retry_data=True) if", "tick interval. Converts the response list to a dict for faster lookups. This", "status) if raw_data is None: retry_reason = \"'None' on successful response\" retry =", "if (status >= 500 and status <= 599 and status != 504) or", "None if no exception occurred. \"\"\" ex = None results = [] for", "Forms the full URL with query string and calculates any needed HMAC signature", "<= 399: data = await response.text() break if (status >= 500 and status", "market summaries from the Bittrex API. Returns: The market summaries dict. \"\"\" results,", "{}, results {}.\", params, status, results) return None return results[0] async def get_order(self,", "query = API_METHODS[method]['params'].format(*params or []) if API_METHODS[method]['auth']: nonce = int(time.time() * 1000) api_key", "return None return results[0] async def get_balance(self, base: str): \"\"\" \"\"\" params =", "= \"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry = False elif ex is not None:", "not None: self.log.error(\"Giving up on: {}\", reason) return (tuple(results), status) @staticmethod async def", "error occurred. \"\"\" market_summaries = await self._get_market_summaries_v1() if market_summaries is None: return None", "}, 'getOrder': { 'path': 'v1.1/account/getorder', 'params': 'uuid={}', 'auth': True }, 'getBalance': { 'path':", "= await response.text() break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as e: retry_reason = '{}:", "each extracted item. 
Any items that failed to be extracted after exhausting all", "{}\", pair, notice) if 'will be removed' in notice or 'will be delisted'", "verbosity=1) self.lock.release() return self.cache['marketSummariesV1']['data'] results, status = await self.call_extract([ \"['result']\", \"['result'][0]['Last']\", # For", "\"\"\" params = [base] results, status = await self.call_extract([ \"['result']['Available']\", ], 'getBalance', params=params,", "retry = True if not retry: try: data = json.loads(raw_data) _ = data['success']", "'open': results[1], 'quantity': results[2], 'remaining': results[3], 'value': results[4], 'fees': results[5], } async def", "= '<NAME> <$(echo nqnz.enshfr#tznvy.pbz | tr a-z# n-za-m@)>' __version__ = \"0.2.0\" __all__ =", "for summary in results[0]: pair = summary['Summary']['MarketName'] active = summary['Market']['IsActive'] notice = summary['Market']['Notice']", "else: self.lock.release() return None self.cache['marketSummariesV1'] = { 'time': time.time(), 'data': market_summaries } self.lock.release()", "ex = None results = [] for item in extract: try: expr =", "URL and headers for a given API method and parameter list. Forms the", "query) signature = hmac.new(api_secret.encode(), url.encode(), hashlib.sha512).hexdigest() headers = {'apisign': signature} else: url =", "False if no error or and unretryable error occurred. (str): Sentence fragment or", "containing: data (object): On a normal 200 response, a tuple containing the values", "Dict[str, Any], retry_data: bool): \"\"\" Handle any exception produced from an extract operation.", "traceback describing the reason for retry or error, or None if no issue", "True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_json {}\".format(method), retry_reason) retry", "and extract data items from its JSON response. 
Implements retry and exponential backoff", "data = await response.text() break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as e: retry_reason =", "backoff for invalid data items. Caution must be taken to ensure that the", "On a response with a missing response body, None. status (int): The HTTP", "exception that occurred during extraction, or None if no exception occurred. \"\"\" ex", "retry_data: If True, will perform backoff and retry on empty or missing data", "params: Sequence[Any]=None): \"\"\" Call a Bittrex API method. Implements retry and exponentional backoff", "the Bittrex API. Returns: The market summaries dict. \"\"\" results, status, = await", "dict for faster lookups. This data is used for batching tick updates, since", "False elif ex is not None: reason = await Client._get_extract_failure_reason(ex, data) retry =", "NOTICE: {}\", pair, notice) if 'will be removed' in notice or 'will be", "config = configuration.config \"\"\" Global configuration. \"\"\" TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}'", "retry and exponential backoff for higher-level API error conditions on a 200 response,", "{}, status {}, results {}.\", params, status, results) return None return results[0] async", "retry, False if no error or and unretryable error occurred. (str): Sentence fragment", "The currency pair eg. 'BTC-ETH'. length: Not supported by the API, will always", "Sequence[Any]=None): \"\"\" Call a Bittrex API method. Implements retry and exponentional backoff for", "price and 24-hour volume for a currency pair from the API. Arguments: pair:", "if raw_data is None: retry_reason = \"'None' on successful response\" retry = True", "market summaries dict. \"\"\" results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To", "float]: \"\"\" Get the last price and 24-hour volume for a currency pair", "a syntax or or extraction error occurred. 
Exception: The last exception that occurred", "\"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True, retry_fail=True) if status != 200", "retry = False return (data, status) async def call_extract(self, extract: Sequence[str], method: str,", "supported by the Bittrex API.\") async def get_last_values(self, pair: str) -> Tuple[float, float]:", "], 'buyLimit', params=params, log=True) if status != 200 or results is None or", "API. Returns: The market summaries dict. \"\"\" results, status, = await self.call_extract([ \"['result']\",", "for a pair from the Bittrex API. Arguments: pair: The currency pair eg.", "<= config['api_max_retries']: data, status = await self.call_json(method, params) if status != 200 or", "'BTC-ETH' Returns: (tuple): A tuple containing: float: The current close price, or None", "a dict for faster lookups. This data is used for batching tick updates,", "query parameters to pass to the method. Returns: (tuple): A tuple containing: data", "dictionary paths of the response data items to extract, eg. [\"['result'][0]['C']\", \"['result'][0]['T']\"] data:", "and parameter list. 
Forms the full URL with query string and calculates any", "data: Dict[str, Any], retry_data: bool): \"\"\" Handle any exception produced from an extract", "'': api_message = data['message'] else: api_message = 'empty or missing results' return \"{}", "last, } return summaries async def get_ticks(self, pair: str, length: int=None) -> List[Dict[str,", "self._extract_items(extract, data) retry, reason = await self._handle_extract_exception(ex, data, retry_data) if retry: attempt +=", "{}'.format(status) retry = True else: self.log.error('Got non-retryable status {}.', status) data = await", "status != 200 or results is None or not results[0]: self.log.error(\"Failed getting order:", "api_key = config['bittrex_api_key'] api_secret = config['bittrex_api_secret'] query = 'apikey={}&nonce={}&'.format(api_key, nonce) + query url", "volume, or None if an error occurred. \"\"\" market_summaries = await self._get_market_summaries_v1() if", "To retry if not at least one element exists \"['result'][0]['T']\" ], 'getTicks', params=params,", "interacting with the Bittrex API. \"\"\" def __init__(self, session: aiohttp.ClientSession, log=utils.logging.DummyLogger()): self.session =", "no exception occurred. 
\"\"\" ex = None results = [] for item in", "status = await self.call_extract([ \"['result']\", \"['result'][0]['Last']\", # For retry of missing fields \"['result'][0]['BaseVolume']\",", "return None self.cache['marketSummariesV1'] = { 'time': time.time(), 'data': market_summaries } self.lock.release() return market_summaries", "'', 'auth': False }, 'getTicks': { 'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth': False },", "\"['result']\", \"['result'][0]['C']\", # To retry if not at least one element exists \"['result'][0]['T']\"", "'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getTicker': { 'path': 'v1.1/public/getticker',", "Bittrex API.\") async def get_last_values(self, pair: str) -> Tuple[float, float]: \"\"\" Get the", "market_summaries = {} for result in results[0]: market_summaries[result['MarketName']] = result else: self.log.error(\"Failed getting", "API error conditions on a 200 response, specifically empty response body, malformed response", "True, will perform backoff and retry on explicit failure response from the API.", "}, 'sellLimit': { 'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'cancelOrder': { 'path':", "raw_data, status = await self.call(method, params) if status != 200: return (raw_data, status)", "call_json(self, method: str, params: list=None): \"\"\" Call a Bittrex API method and parse", "Object logger. \"\"\" self.lock = asyncio.Lock() \"\"\" Lock used for syncing access to", "with query string and calculates any needed HMAC signature to be passed in", "attempt < config['http_max_retries']: try: async with self.session.get(url, headers=headers) as response: status = response.status", "connection or transport failure. Raises: SyntaxError, NameError: If one or more of the", "of data. 
Arguments: extract: List of strings representing the dictionary paths of the", "as inactive due to pending removal.\", pair) active = False summaries[pair] = {", "the passed extract dict paths contains invalid syntax. \"\"\" retry = False attempt", "self._get_market_summaries_v1() if market_summaries is None: return None return (market_summaries[pair]['Last'], market_summaries[pair]['BaseVolume']) async def buy_limit(self,", "summary['Market']['BaseCurrency'], 'minTradeQty': summary['Market']['MinTradeSize'], 'minTradeSize': 0.0, 'minTradeValue': 0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last': last,", "None. On a non-200 response, the raw response body (may be None). On", "containing: data (object): On success, a dict containing the parsed JSON response. On", "containing the values for each extracted item. Any items that failed to be", "method: Name of the API method to call. params: Values of query parameters", "and data['message'] and data['message'] != '': api_message = data['message'] else: api_message = 'empty", "params, status, results) return None return results[0] async def get_order(self, pair: str, order_id:", "self.session.get(url, headers=headers) as response: status = response.status if status >= 200 and status", "= session \"\"\" Object HTTP client session. 
\"\"\" self.log = utils.logging.ChildLogger(parent=log, scope=self) \"\"\"", "SyntaxError, NameError) as e: ex = e results.append(None) return (results, ex) @staticmethod async", "status = await self.call_extract([ \"['success']\", \"['result']['IsOpen']\", \"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder', params=params,", "url.encode(), hashlib.sha512).hexdigest() headers = {'apisign': signature} else: url = API_URL.format(API_METHODS[method]['path'], query) headers =", "retry = False else: reason = None retry = False return (retry, reason)", "(dict): Dictionary of headers for the request, or None if no headers are", "indicates a connection or transport failure. Raises: SyntaxError, NameError: If one or more", "tick interval. \"\"\" if config['tick_interval_secs'] == 60: self.tick_interval_str = 'oneMin' elif config['tick_interval_secs'] ==", "int=None) -> List[Dict[str, Any]]: \"\"\" Get ticks (closing values and closing times) for", "\"\"\" await self.lock.acquire() if 'marketSummariesV1' in self.cache: if time.time() - self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']:", "None if an error occurred or no ticks are available. \"\"\" params =", "{}, status {}, results {}.\", params, status, results) return None balance = results[0]", "be None). On a response with a missing response body, None. status (int):", "passed in headers. Arguments: method: Name of the API method to call. params:", "to the method. 
Returns: (tuple): A tuple containing: data (object): On success, a", "True try: reason = data['message'] if data['message'] != '' else \"success == false", "data = None while attempt < config['http_max_retries']: raw_data, status = await self.call(method, params)", "self.call(method, params) if status != 200: return (raw_data, status) if raw_data is None:", "'%Y-%m-%dT%H:%M:%S' API_URL = 'https://bittrex.com/api/{}?{}' API_METHODS = { 'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '',", "exponential backoff for invalid data items. Caution must be taken to ensure that", "batching tick updates, since the v1 API is kept current (unlike v2). \"\"\"", "ticks (closing values and closing times) for a pair from the Bittrex API.", "and retry_fail: retry = True try: reason = data['message'] if data['message'] != ''", "a Bittrex API method. Implements retry and exponentional backoff for HTTP level error", "_extract_items(extract: Sequence[str], data: Dict[str, Any]): \"\"\" Extract items from a dictionary of data.", "self.tick_interval_str = 'oneMin' elif config['tick_interval_secs'] == 300: self.tick_interval_str = 'fiveMin' else: raise ValueError(\"Unsupported", "= time.time() self.lock.release() return self.cache['marketSummariesV1']['data'] else: self.lock.release() return None self.cache['marketSummariesV1'] = { 'time':", "Converts the response list to a dict for faster lookups. 
This data is", "getting balance: params {}, status {}, results {}.\", params, status, results) return None", "if status >= 200 and status <= 399: data = await response.text() break", "return results[0] async def sell_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\"", "{}, results {}.\", params, status, results) return None return { 'open': results[1], 'quantity':", "== 60: self.tick_interval_str = 'oneMin' elif config['tick_interval_secs'] == 300: self.tick_interval_str = 'fiveMin' else:", "408, 429]): retry_reason = 'status {}'.format(status) retry = True else: self.log.error('Got non-retryable status", "= 0 while attempt <= config['api_max_retries']: data, status = await self.call_json(method, params) if", "while attempt <= config['api_max_retries']: data, status = await self.call_json(method, params) if status !=", "= await self.call_extract([ \"['result']\", \"['result'][0]['Market']['BaseCurrency']\", # To retry on any missing fields \"['result'][0]['Market']['MinTradeSize']\",", "(closing values and closing times) for a pair from the Bittrex API. \"\"\"", "If True, will log the API JSON response. This is optional as some", "method '{}({})': status {}, data {}\", method, params, status, data) return (data, status)", "\"\"\" String representation of the configured tick interval. \"\"\" if config['tick_interval_secs'] == 60:", "= response.status if status >= 200 and status <= 399: data = await", "status == 200 and results is not None and results[0] is not None:", "data items. Syntax errors in extract paths will not be retried. 
retry_fail: If", "return self.cache['marketSummariesV1']['data'] results, status = await self.call_extract([ \"['result']\", \"['result'][0]['Last']\", # For retry of", "None or results[0] is None: self.log.error(\"Failed executing sell order request: params {}, status", "conditions on a 200 response, specifically empty response body, malformed response body (invalid", "await common.backoff(attempt, \"Bittrex call {}\".format(method), retry_reason) retry = False return (data, status) @staticmethod", "config['tick_interval_secs'] == 60: self.tick_interval_str = 'oneMin' elif config['tick_interval_secs'] == 300: self.tick_interval_str = 'fiveMin'", "retry = False attempt = 0 status = 0 data = None while", "each extracted path, or None if a syntax or or extraction error occurred.", "attempt < config['http_max_retries']: raw_data, status = await self.call(method, params) if status != 200:", "times) for a pair from the Bittrex API. Arguments: pair: The currency pair", "Call a Bittrex API method and parse JSON response. Implements retry and exponential", "-*- coding: utf-8 -*- \"\"\" Bittrex API module. \"\"\" __author__ = '<NAME> <$(echo", "= False return (retry, reason) @staticmethod async def _get_extract_failure_reason(ex: Exception, data: Dict[str, Any]):", "the API, or None if an error occurred or no ticks are available.", "asyncio import hashlib import traceback from datetime import datetime, timezone from typing import", "return balance async def _get_market_summaries_v1(self): \"\"\" Get v1 market summaries from the API,", "data items. 
Caution must be taken to ensure that the specified extract dict", "or results[0] is None: self.log.error(\"Failed getting ticks: params {}, status {}, results {}.\",", "(str): Sentence fragment or formatted traceback describing the reason for retry or error,", "= {} for summary in results[0]: pair = summary['Summary']['MarketName'] active = summary['Market']['IsActive'] notice", "pending removal.\", pair) active = False summaries[pair] = { 'active': active, 'baseCurrency': summary['Market']['BaseCurrency'],", "can be quite large. Returns: (tuple): A tuple containing: data (object): On a", "float: The current close price, or None if an error occurred. float: The", "params {} status {}, results {}.\", params, status, results) return None return results[0]", "from datetime import datetime, timezone from typing import Any, Dict, List, Sequence, Tuple", "warrants a retry, False if no error or and unretryable error occurred. (str):", "reason = \"{}: {}\\n{}\".format(type(ex).__name__, ex, ''.join(traceback.format_tb(ex.__traceback__))) retry = False elif ex is not", "and status <= 399: data = await response.text() break if (status >= 500", "break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as e: retry_reason = '{}: {}'.format(type(e).__name__, e) retry", "is None: self.log.error(\"Failed getting balance: params {}, status {}, results {}.\", params, status,", "extracted path, or None if a syntax or or extraction error occurred. Exception:", "], 'getMarketSummaries', retry_data=True, retry_fail=True) if status != 200 or results is None or", "summary['Market']['IsActive'] notice = summary['Market']['Notice'] last = summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay'] if not prev_day:", "import Any, Dict, List, Sequence, Tuple import api import utils import common import", "retry = False elif ex is not None: reason = await Client._get_extract_failure_reason(ex, data)", "name eg. 
'BTC-ETH' Returns: (tuple): A tuple containing: float: The current close price,", "fields \"['result'][0]['Market']['MinTradeSize']\", \"['result'][0]['Market']['IsActive']\", \"['result'][0]['Market']['Notice']\", \"['result'][0]['Summary']['MarketName']\", \"['result'][0]['Summary']['BaseVolume']\", \"['result'][0]['Summary']['PrevDay']\", \"['result'][0]['Summary']['Last']\", ], 'getMarketSummaries', retry_data=True, retry_fail=True) if", "\"['result']['Available']\", ], 'getBalance', params=params, log=True, retry_data=True) if status != 200 or results is", "be removed' in notice or 'will be delisted' in notice or 'scheduled for", "buy_limit(self, pair: str, quantity: float, value: float): \"\"\" \"\"\" params = [pair, quantity,", "return (data, status) async def call_extract(self, extract: Sequence[str], method: str, params: Sequence[Any]=None, retry_data=False,", "extraction attempt. \"\"\" if 'message' in data and data['message'] and data['message'] != '':", "responses can be quite large. Returns: (tuple): A tuple containing: data (object): On", "\"\"\" Client for interacting with the Bittrex API. 
\"\"\" def __init__(self, session: aiohttp.ClientSession,", "'{}: {}'.format(type(e).__name__, e) retry = True if retry: attempt += 1 await common.backoff(attempt,", "\"['result']['Quantity']\", \"['result']['QuantityRemaining']\", \"['result']['PricePerUnit']\", \"['result']['CommissionPaid']\", ], 'getOrder', params=params, log=True, retry_data=True) if status != 200", "results[0] is None: self.log.error(\"Failed getting market summaries: status {}, results {}.\", status, results)", "status, results) return None return results[0] async def get_order(self, pair: str, order_id: str):", "response.text() break except (aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as e: retry_reason = '{}: {}'.format(type(e).__name__, e)", "results) return None return { 'open': results[1], 'quantity': results[2], 'remaining': results[3], 'value': results[4],", "if notice: self.log.info(\"{} NOTICE: {}\", pair, notice) if 'will be removed' in notice", "0.0, 'baseVolume': summary['Summary']['BaseVolume'], 'prevDay': prev_day, 'last': last, } return summaries async def get_ticks(self,", "KeyError: reason = \"success == false (missing message)\" if not retry: results, ex", "exponential backoff for higher-level API error conditions on a 200 response, specifically empty", "non-200 response, the raw response body (may be None). On a 200 response", "= True if not retry: try: data = json.loads(raw_data) _ = data['success'] return", "(str): Full URL for the request. (dict): Dictionary of headers for the request,", "= [pair, quantity, value] results, status = await self.call_extract([ \"['result']['uuid']\", ], 'sellLimit', params=params,", "None return { 'open': results[1], 'quantity': results[2], 'remaining': results[3], 'value': results[4], 'fees': results[5],", "None if an error occurred. 
\"\"\" market_summaries = await self._get_market_summaries_v1() if market_summaries is", "is not None: reason = await Client._get_extract_failure_reason(ex, data) retry = False else: reason", "{ 'open': results[1], 'quantity': results[2], 'remaining': results[3], 'value': results[4], 'fees': results[5], } async", "API, or None if an error occurred or no ticks are available. \"\"\"", "results is None or results[0] is None: self.log.error(\"Failed getting market summaries: status {},", "'getMarketSummaries', retry_data=True, retry_fail=True) if status != 200 or results is None or results[0]", "or results is None or results[0] is None: self.log.error(\"Failed getting ticks: params {},", "A tuple containing: list: Result of each extracted path, or None if a", "Dictionary of data passed to :meth:`_extract_items`. retry_data: True if missing data should be", "pass to the method. Returns: (tuple): A tuple containing: (str): Full URL for", "Sequence[Any]=None): \"\"\" Get the request URL and headers for a given API method", "await self.lock.acquire() if 'marketSummariesV1' in self.cache: if time.time() - self.cache['marketSummariesV1']['time'] < config['tick_interval_secs']: self.log.debug(\"Returning", "summary['Summary']['MarketName'] active = summary['Market']['IsActive'] notice = summary['Market']['Notice'] last = summary['Summary']['Last'] prev_day = summary['Summary']['PrevDay']", "{}, results {}.\", params, status, results) return None return results[0] async def sell_limit(self,", "reason = None retry = False return (retry, reason) @staticmethod async def _get_extract_failure_reason(ex:", "API method and parse JSON response. 
Implements retry and exponential backoff for higher-level", "str): \"\"\" \"\"\" params = [order_id] results, status = await self.call_extract([ \"['success']\" ],", "is None or not results[0]: self.log.error(\"Failed getting order: params{}, status {}, results {}.\",", "containing: list: Result of each extracted path, or None if a syntax or", "'<NAME> <$(echo nqnz.enshfr#tznvy.pbz | tr a-z# n-za-m@)>' __version__ = \"0.2.0\" __all__ = ['Client']", "JSON response. Implements retry and exponential backoff for invalid data items. Caution must", "True }, 'sellLimit': { 'path': 'v1.1/market/selllimit', 'params': 'market={}&quantity={}&rate={}', 'auth': True }, 'cancelOrder': {", "retried, false otherwise. Returns: (tuple): A tuple containing: (bool): True if the exception", "method to call. params: Values of query parameters to pass to the method.", "self.cache['marketSummariesV1']['time'] = time.time() self.lock.release() return self.cache['marketSummariesV1']['data'] else: self.lock.release() return None self.cache['marketSummariesV1'] = {", "higher-level API error conditions on a 200 response, specifically empty response body, malformed", "exception warrants a retry, False if no error or and unretryable error occurred.", "removal.\", pair) active = False summaries[pair] = { 'active': active, 'baseCurrency': summary['Market']['BaseCurrency'], 'minTradeQty':", "message (if present). Arguments: data: Dict of the parsed API response. ex: Exception", "'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}', 'auth': False }, 'getLatestTick': { 'path': 'v2.0/pub/market/getLatestTick', 'params': 'marketName={}&tickInterval={}',", "-> Tuple[float, float]: \"\"\" Get the last price and 24-hour volume for a", "status, results) if 'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time'] = time.time() self.lock.release() return self.cache['marketSummariesV1']['data'] else:", "invalid syntax. 
\"\"\" retry = False attempt = 0 while attempt <= config['api_max_retries']:", "the method. Returns: (tuple): A tuple containing: data (object): On success, a dict", "'auth': True }, 'getOrder': { 'path': 'v1.1/account/getorder', 'params': 'uuid={}', 'auth': True }, 'getBalance':", "tuple containing: data (object): On success, a dict containing the parsed JSON response.", "data['success']: retry = True else: retry = False elif isinstance(ex, (SyntaxError, NameError)): reason", "if status == 200 and results is not None and results[0] is not", "{}.\", params, status, results) return None return { 'open': results[1], 'quantity': results[2], 'remaining':", "status = await self.call_extract([ \"['result']['Available']\", ], 'getBalance', params=params, log=True, retry_data=True) if status !=", "def get_market_summaries(self) -> List[Dict[str, Any]]: \"\"\" Get the market summaries from the Bittrex", "response body (invalid JSON), or missing 'success' value. Arguments: method: Name of the", "or []) if API_METHODS[method]['auth']: nonce = int(time.time() * 1000) api_key = config['bittrex_api_key'] api_secret", "= None retry = False return (retry, reason) @staticmethod async def _get_extract_failure_reason(ex: Exception,", "Response cache. \"\"\" self.tick_interval_str: str \"\"\" String representation of the configured tick interval.", "to extract items from. Returns: (tuple): A tuple containing: list: Result of each", "URL for the request. (dict): Dictionary of headers for the request, or None", "non-200 response, the raw response body (may be None). 
On a response with", "data is used for batching tick updates, since the v1 API is kept", "= results[0] self.cache['balance'][base] = { 'time': time.time(), 'data': balance } return balance async", "isinstance(ex, (TypeError, IndexError, KeyError)): reason = await Client._get_extract_failure_reason(ex, data) if retry_data and data['success']:", "= None while attempt < config['http_max_retries']: raw_data, status = await self.call(method, params) if", "True }, 'getBalance': { 'path': 'v1.1/account/getbalance', 'params': 'currency={}', 'auth': True }, } class", "interval: {}\".format(config['tick_interval_secs'])) async def call(self, method: str, params: Sequence[Any]=None): \"\"\" Call a Bittrex", "data['message'] if data['message'] != '' else \"success == false (blank message)\" except KeyError:", "ex is not None: reason = await Client._get_extract_failure_reason(ex, data) retry = False else:", "volume for a currency pair from the API. Arguments: pair: Currency pair name", "(results, ex) @staticmethod async def _handle_extract_exception(ex: Exception, data: Dict[str, Any], retry_data: bool): \"\"\"", "(tuple): A tuple containing: list: Result of each extracted path, or None if", "delisting' in notice: self.log.info(\"{} marked as inactive due to pending removal.\", pair) active", "log=utils.logging.DummyLogger()): self.session = session \"\"\" Object HTTP client session. \"\"\" self.log = utils.logging.ChildLogger(parent=log,", "raise NotImplementedError(\"Tick range not supported by the Bittrex API.\") async def get_last_values(self, pair:", "results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['C']\", # To retry if not at", "status >= 200 and status <= 399: data = await response.text() break if", "200 or results is None or results[0] is None: self.log.error(\"Failed getting balance: params", "or had syntax errors in extract paths will be set to None. 
On", "pair: str, order_id: str): \"\"\" \"\"\" params = [order_id] results, status = await", "for the request. (dict): Dictionary of headers for the request, or None if", "self.call_extract([ \"['result']\", \"['result'][0]['Last']\", # For retry of missing fields \"['result'][0]['BaseVolume']\", \"['result'][0]['PrevDay']\", ], 'getMarketSummariesV1',", "A value of 0 indicates a connection or transport failure. \"\"\" retry =", "after exhausting all retries, or had syntax errors in extract paths will be", "must be taken to ensure that the specified extract dict keys are correct", "while attempt < config['http_max_retries']: raw_data, status = await self.call(method, params) if status !=", "'getMarketSummaries': { 'path': 'v2.0/pub/markets/getMarketSummaries', 'params': '', 'auth': False }, 'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries',", "= [] for item in extract: try: expr = 'lambda d: d' +", "def _get_request_data(method: str, params: Sequence[Any]=None): \"\"\" Get the request URL and headers for", "'path': 'v1.1/public/getMarketSummaries', 'params': '', 'auth': False }, 'getTicks': { 'path': 'v2.0/pub/market/getTicks', 'params': 'marketName={}&tickInterval={}',", "'message' in data and data['message'] and data['message'] != '': api_message = data['message'] else:", "pair, notice) if 'will be removed' in notice or 'will be delisted' in", "results, status = await self.call_extract([ \"['result']\", \"['result'][0]['Last']\", # For retry of missing fields", "perform backoff and retry on explicit failure response from the API. log: If", "retry_data=False, retry_fail=False, log=False): \"\"\" Call a Bittrex API method and extract data items", "Lock used for syncing access to API data. \"\"\" self.cache = { 'balance':", "= [pair, self.tick_interval_str] results, status, = await self.call_extract([ \"['result']\", \"['result'][0]['C']\", # To retry", "items. 
Caution must be taken to ensure that the specified extract dict keys", "pylint: disable=W0123 results.append(expr_func(data)) except (TypeError, IndexError, KeyError, SyntaxError, NameError) as e: ex =", "returned from :meth:`_extract_items`. data: Dictionary of data passed to :meth:`_extract_items`. retry_data: True if", "(aiohttp.ClientConnectionError, aiohttp.ClientPayloadError, asyncio.TimeoutError) as e: retry_reason = '{}: {}'.format(type(e).__name__, e) retry = True", "supported by the API, will always return all ticks. Returns: A list of", "interval. Converts the response list to a dict for faster lookups. This data", "the v1 API is kept current (unlike v2). \"\"\" await self.lock.acquire() if 'marketSummariesV1'", "exponentional backoff for HTTP level error conditions. Arguments: method: Name of the API", "config['http_max_retries']: raw_data, status = await self.call(method, params) if status != 200: return (raw_data,", "extraction, or None if no exception occurred. \"\"\" ex = None results =", "Result of each extracted path, or None if a syntax or or extraction", "= \"'None' on successful response\" retry = True if not retry: try: data", "API_URL.format(API_METHODS[method]['path'], query) headers = None return (url, headers) async def call_json(self, method: str,", "str \"\"\" String representation of the configured tick interval. 
\"\"\" if config['tick_interval_secs'] ==", "'lambda d: d' + item expr_func = eval(expr) # pylint: disable=W0123 results.append(expr_func(data)) except", "'last': last, } return summaries async def get_ticks(self, pair: str, length: int=None) ->", "cancel order request: params {} status {}, results {}.\", params, status, results) return", "value\" retry = True if retry: attempt += 1 await common.backoff(attempt, \"Bittrex call_json", "results[0]: close_datetime = datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp() return results[0] async def get_tick_range(self,", "extract operation. Arguments: ex: Exception returned from :meth:`_extract_items`. data: Dictionary of data passed", "attempt += 1 await common.backoff(attempt, \"Bittrex call_json {}\".format(method), retry_reason) retry = False return", "missing data items. Syntax errors in extract paths will not be retried. retry_fail:", "\"\"\" market_summaries = await self._get_market_summaries_v1() if market_summaries is None: return None return (market_summaries[pair]['Last'],", "{}, results {}.\", status, results) if 'marketSummariesV1' in self.cache: self.cache['marketSummariesV1']['time'] = time.time() self.lock.release()", "Any]]: \"\"\" Get a range of ticks (closing values and closing times) for", "If True, will perform backoff and retry on explicit failure response from the", "list of the raw tick data from the API, or None if an", "return None for tick in results[0]: close_datetime = datetime.strptime(tick['T'], TIME_FORMAT) tick['T'] = close_datetime.replace(tzinfo=timezone.utc).timestamp()", "get_tick_range(self, pair: str, start_time: float, end_time: float) -> List[Dict[str, Any]]: \"\"\" Get a", "or results[0] is None: self.log.error(\"Failed getting market summaries: status {}, results {}.\", status,", "scope=self) \"\"\" Object logger. \"\"\" self.lock = asyncio.Lock() \"\"\" Lock used for syncing", "API method. 
Implements retry and exponentional backoff for HTTP level error conditions. Arguments:", "errors in extract paths will not be retried. retry_fail: If True, will perform", "params, json.dumps(data, indent=2)) if not data['success'] and retry_fail: retry = True try: reason", "any exception produced from an extract operation. Arguments: ex: Exception returned from :meth:`_extract_items`.", "= { 'balance': {} } \"\"\" Response cache. \"\"\" self.tick_interval_str: str \"\"\" String", "ex = await self._extract_items(extract, data) retry, reason = await self._handle_extract_exception(ex, data, retry_data) if", "str, params: Sequence[Any]=None): \"\"\" Call a Bittrex API method. Implements retry and exponentional", "(may be None). On a 200 response with a missing response body, None.", "The current close price, or None if an error occurred. float: The current", "(data, status) if log: self.log.debug(\"API method '{}({})' response:\\n{}\", method, params, json.dumps(data, indent=2)) if", "{ 'path': 'v1.1/market/cancel', 'params': 'uuid={}', 'auth': True }, 'getOrder': { 'path': 'v1.1/account/getorder', 'params':", "None while attempt < config['http_max_retries']: raw_data, status = await self.call(method, params) if status", "{}\".format(method), reason) retry = False else: break if reason is not None: self.log.error(\"Giving", "except KeyError: reason = \"success == false (missing message)\" if not retry: results,", "response body, None. status (int): The HTTP response status code. A value of", "in [0, 408, 429]): retry_reason = 'status {}'.format(status) retry = True else: self.log.error('Got", "__author__ = '<NAME> <$(echo nqnz.enshfr#tznvy.pbz | tr a-z# n-za-m@)>' __version__ = \"0.2.0\" __all__", "perform backoff and retry on empty or missing data items. Syntax errors in", "Dict of the parsed API response. 
ex: Exception thrown as a result of", "'auth': False }, 'getMarketSummariesV1': { 'path': 'v1.1/public/getMarketSummaries', 'params': '', 'auth': False }, 'getTicks':" ]
[ "API \"\"\" if not is_team_guid(guid): raise InvalidGuid(\"'{}' is not a valid team GUID.\".format(guid))", ":raise ApiCallFailed: something went wrong while calling API \"\"\" if not is_team_guid(guid): raise", "basketball team :rtype: [dict] :return: a list of dictionaries containing information about team's", "raise InvalidGuid(\"'{}' is not a valid team GUID.\".format(guid)) url = API_BASE_URL + \"TeamMatchesByGuid?teamGuid={}\".format(guid)", "about team's games :raise ApiCallFailed: something went wrong while calling API \"\"\" if", "bvlapi.guid.team import is_team_guid def get_matches_by_guid(guid): \"\"\" Calls API to retrieve information about a", "import call_api from bvlapi.api.settings import API_BASE_URL from bvlapi.common.exceptions import InvalidGuid from bvlapi.guid.team import", "Calls API to retrieve information about a basketball team's season. :param str guid:", "API for information about a team's games. from bvlapi.api.call import call_api from bvlapi.api.settings", "basketball team's season. :param str guid: GUID of basketball team :rtype: [dict] :return:", "call API for information about a team's games. from bvlapi.api.call import call_api from", "guid: GUID of basketball team :rtype: [dict] :return: a list of dictionaries containing", "a team's games. from bvlapi.api.call import call_api from bvlapi.api.settings import API_BASE_URL from bvlapi.common.exceptions", "def get_matches_by_guid(guid): \"\"\" Calls API to retrieve information about a basketball team's season.", "about a basketball team's season. :param str guid: GUID of basketball team :rtype:", "games. 
from bvlapi.api.call import call_api from bvlapi.api.settings import API_BASE_URL from bvlapi.common.exceptions import InvalidGuid", "information about team's games :raise ApiCallFailed: something went wrong while calling API \"\"\"", "wrong while calling API \"\"\" if not is_team_guid(guid): raise InvalidGuid(\"'{}' is not a", "from bvlapi.api.settings import API_BASE_URL from bvlapi.common.exceptions import InvalidGuid from bvlapi.guid.team import is_team_guid def", "call_api from bvlapi.api.settings import API_BASE_URL from bvlapi.common.exceptions import InvalidGuid from bvlapi.guid.team import is_team_guid", "Contains function to call API for information about a team's games. from bvlapi.api.call", "from bvlapi.guid.team import is_team_guid def get_matches_by_guid(guid): \"\"\" Calls API to retrieve information about", "team's games. from bvlapi.api.call import call_api from bvlapi.api.settings import API_BASE_URL from bvlapi.common.exceptions import", "ApiCallFailed: something went wrong while calling API \"\"\" if not is_team_guid(guid): raise InvalidGuid(\"'{}'", "import InvalidGuid from bvlapi.guid.team import is_team_guid def get_matches_by_guid(guid): \"\"\" Calls API to retrieve", "is not a valid team GUID.\".format(guid)) url = API_BASE_URL + \"TeamMatchesByGuid?teamGuid={}\".format(guid) return call_api(url)", "season. :param str guid: GUID of basketball team :rtype: [dict] :return: a list", "\"\"\" if not is_team_guid(guid): raise InvalidGuid(\"'{}' is not a valid team GUID.\".format(guid)) url", "team :rtype: [dict] :return: a list of dictionaries containing information about team's games", "information about a basketball team's season. :param str guid: GUID of basketball team", "str guid: GUID of basketball team :rtype: [dict] :return: a list of dictionaries", "-*- # Contains function to call API for information about a team's games.", "to call API for information about a team's games. 
from bvlapi.api.call import call_api", "a basketball team's season. :param str guid: GUID of basketball team :rtype: [dict]", "not is_team_guid(guid): raise InvalidGuid(\"'{}' is not a valid team GUID.\".format(guid)) url = API_BASE_URL", "retrieve information about a basketball team's season. :param str guid: GUID of basketball", "is_team_guid(guid): raise InvalidGuid(\"'{}' is not a valid team GUID.\".format(guid)) url = API_BASE_URL +", "GUID of basketball team :rtype: [dict] :return: a list of dictionaries containing information", "if not is_team_guid(guid): raise InvalidGuid(\"'{}' is not a valid team GUID.\".format(guid)) url =", "bvlapi.api.settings import API_BASE_URL from bvlapi.common.exceptions import InvalidGuid from bvlapi.guid.team import is_team_guid def get_matches_by_guid(guid):", "dictionaries containing information about team's games :raise ApiCallFailed: something went wrong while calling", "of dictionaries containing information about team's games :raise ApiCallFailed: something went wrong while", "for information about a team's games. from bvlapi.api.call import call_api from bvlapi.api.settings import", "games :raise ApiCallFailed: something went wrong while calling API \"\"\" if not is_team_guid(guid):", "while calling API \"\"\" if not is_team_guid(guid): raise InvalidGuid(\"'{}' is not a valid", "import is_team_guid def get_matches_by_guid(guid): \"\"\" Calls API to retrieve information about a basketball", "API to retrieve information about a basketball team's season. :param str guid: GUID", "python3 # -*- coding: utf-8 -*- # Contains function to call API for", "-*- coding: utf-8 -*- # Contains function to call API for information about", "to retrieve information about a basketball team's season. 
:param str guid: GUID of", "containing information about team's games :raise ApiCallFailed: something went wrong while calling API", "is_team_guid def get_matches_by_guid(guid): \"\"\" Calls API to retrieve information about a basketball team's", "import API_BASE_URL from bvlapi.common.exceptions import InvalidGuid from bvlapi.guid.team import is_team_guid def get_matches_by_guid(guid): \"\"\"", "something went wrong while calling API \"\"\" if not is_team_guid(guid): raise InvalidGuid(\"'{}' is", "coding: utf-8 -*- # Contains function to call API for information about a", "bvlapi.api.call import call_api from bvlapi.api.settings import API_BASE_URL from bvlapi.common.exceptions import InvalidGuid from bvlapi.guid.team", "\"\"\" Calls API to retrieve information about a basketball team's season. :param str", "# -*- coding: utf-8 -*- # Contains function to call API for information", "from bvlapi.common.exceptions import InvalidGuid from bvlapi.guid.team import is_team_guid def get_matches_by_guid(guid): \"\"\" Calls API", "InvalidGuid from bvlapi.guid.team import is_team_guid def get_matches_by_guid(guid): \"\"\" Calls API to retrieve information", "API_BASE_URL from bvlapi.common.exceptions import InvalidGuid from bvlapi.guid.team import is_team_guid def get_matches_by_guid(guid): \"\"\" Calls", "utf-8 -*- # Contains function to call API for information about a team's", ":param str guid: GUID of basketball team :rtype: [dict] :return: a list of", "list of dictionaries containing information about team's games :raise ApiCallFailed: something went wrong", "went wrong while calling API \"\"\" if not is_team_guid(guid): raise InvalidGuid(\"'{}' is not", "# Contains function to call API for information about a team's games. from", "a list of dictionaries containing information about team's games :raise ApiCallFailed: something went", "information about a team's games. 
from bvlapi.api.call import call_api from bvlapi.api.settings import API_BASE_URL", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Contains function to call API", "InvalidGuid(\"'{}' is not a valid team GUID.\".format(guid)) url = API_BASE_URL + \"TeamMatchesByGuid?teamGuid={}\".format(guid) return", "team's season. :param str guid: GUID of basketball team :rtype: [dict] :return: a", "get_matches_by_guid(guid): \"\"\" Calls API to retrieve information about a basketball team's season. :param", "of basketball team :rtype: [dict] :return: a list of dictionaries containing information about", "team's games :raise ApiCallFailed: something went wrong while calling API \"\"\" if not", "about a team's games. from bvlapi.api.call import call_api from bvlapi.api.settings import API_BASE_URL from", ":return: a list of dictionaries containing information about team's games :raise ApiCallFailed: something", "from bvlapi.api.call import call_api from bvlapi.api.settings import API_BASE_URL from bvlapi.common.exceptions import InvalidGuid from", "function to call API for information about a team's games. from bvlapi.api.call import", "bvlapi.common.exceptions import InvalidGuid from bvlapi.guid.team import is_team_guid def get_matches_by_guid(guid): \"\"\" Calls API to", "[dict] :return: a list of dictionaries containing information about team's games :raise ApiCallFailed:", "calling API \"\"\" if not is_team_guid(guid): raise InvalidGuid(\"'{}' is not a valid team", ":rtype: [dict] :return: a list of dictionaries containing information about team's games :raise" ]
[]
[ "'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None): segments", "{'id': 'first', 'name': 'first project'} ] return render_template('dashboard.html', session=session, projects=projects) @app.route('/login', methods=['GET', 'POST'])", "{'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2', 'src': 'src2', 'target': 'targ2'}, ]", "if request.method == 'POST': session['username'] = request.form['username'] return redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout') def", "from transik import app @app.route('/') def dashboard(): projects = [ {'id': 'first', 'name':", "redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout') def logout(): # remove the username from the session", "return render_template('login.html') @app.route('/logout') def logout(): # remove the username from the session if", "'POST': session['username'] = request.form['username'] return redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout') def logout(): # remove", "the session if it's there session.pop('username', None) return redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id): segments", "return render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None): segments = [ {'key': 's1',", "] return render_template('project.html', segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None): segments = [ {'key':", "it's there session.pop('username', None) return redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id): segments = [ {'key':", "= [ {'id': 'first', 'name': 'first project'} ] return 
render_template('dashboard.html', session=session, projects=projects) @app.route('/login',", "@app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'},", "session['username'] = request.form['username'] return redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout') def logout(): # remove the", "render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None): segments = [ {'key': 's1', 'src':", "def login(): if request.method == 'POST': session['username'] = request.form['username'] return redirect(url_for('dashboard')) return render_template('login.html')", "= request.form['username'] return redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout') def logout(): # remove the username", "'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None):", "render_template('project.html', segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None): segments = [ {'key': 's1', 'src':", "session if it's there session.pop('username', None) return redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id): segments =", "'first project'} ] return render_template('dashboard.html', session=session, projects=projects) @app.route('/login', methods=['GET', 'POST']) def login(): if", "def export_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key':", "return redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout') def logout(): # remove the username from the", "segments=segments) 
@app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1',", "segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2', 'src': 'src2',", "session, url_for from transik import app @app.route('/') def dashboard(): projects = [ {'id':", "export_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2',", "'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None): segments =", "@app.route('/logout') def logout(): # remove the username from the session if it's there", "from the session if it's there session.pop('username', None) return redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id):", "@app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'},", "'target': 'targ1'}, {'key': 's2', 'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/import/<project_id>')", "<gh_stars>0 from flask import flash, render_template, redirect, request, session, url_for from transik import", "remove the username from the session if it's there session.pop('username', None) return redirect(url_for('dashboard'))", "'s1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2', 'src': 'src2', 'target': 'targ2'}, ] return", "segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1',", "'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None):", "username 
from the session if it's there session.pop('username', None) return redirect(url_for('dashboard')) @app.route('/project/<id>') def", "'targ1'}, {'key': 's2', 'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>')", "flask import flash, render_template, redirect, request, session, url_for from transik import app @app.route('/')", "lang=None): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2', 'src':", "flash, render_template, redirect, request, session, url_for from transik import app @app.route('/') def dashboard():", "'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None): segments = [", "'first', 'name': 'first project'} ] return render_template('dashboard.html', session=session, projects=projects) @app.route('/login', methods=['GET', 'POST']) def", "import_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2',", "] return render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None): segments = [ {'key':", "= [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2', 'src': 'src2', 'target':", "def import_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key':", "return redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id): segments = [ {'key': 's1', 'src': 'src1', 'target':", "'target': 'targ1'}, {'key': 's2', 'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/export/<project_id>')", "'s2', 'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) 
@app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id,", "request.method == 'POST': session['username'] = request.form['username'] return redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout') def logout():", "def dashboard(): projects = [ {'id': 'first', 'name': 'first project'} ] return render_template('dashboard.html',", "'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] return redirect(url_for('dashboard')) return", "def logout(): # remove the username from the session if it's there session.pop('username',", "{'key': 's2', 'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def", "'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None): segments = [", "from flask import flash, render_template, redirect, request, session, url_for from transik import app", "return render_template('project.html', segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None): segments = [ {'key': 's1',", "render_template, redirect, request, session, url_for from transik import app @app.route('/') def dashboard(): projects", "# remove the username from the session if it's there session.pop('username', None) return", "session.pop('username', None) return redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id): segments = [ {'key': 's1', 'src':", "== 'POST': session['username'] = request.form['username'] return redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout') def logout(): #", "render_template('login.html') @app.route('/logout') def logout(): # remove the username 
from the session if it's", "None) return redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id): segments = [ {'key': 's1', 'src': 'src1',", "@app.route('/project/<id>') def project(id): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key':", "] return render_template('dashboard.html', session=session, projects=projects) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method ==", "@app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1', 'target':", "'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None): segments", "request.form['username'] return redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout') def logout(): # remove the username from", "[ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2', 'src': 'src2', 'target': 'targ2'},", "render_template('dashboard.html', session=session, projects=projects) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username']", "[ {'id': 'first', 'name': 'first project'} ] return render_template('dashboard.html', session=session, projects=projects) @app.route('/login', methods=['GET',", "logout(): # remove the username from the session if it's there session.pop('username', None)", "transik import app @app.route('/') def dashboard(): projects = [ {'id': 'first', 'name': 'first", "return render_template('dashboard.html', session=session, projects=projects) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST':", "project(id): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2', 'src':", "'src1', 'target': 'targ1'}, {'key': 's2', 'src': 'src2', 'target': 'targ2'}, ] return 
render_template('project.html', segments=segments)", "import app @app.route('/') def dashboard(): projects = [ {'id': 'first', 'name': 'first project'}", "redirect, request, session, url_for from transik import app @app.route('/') def dashboard(): projects =", "def project(id): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'}, {'key': 's2',", "login(): if request.method == 'POST': session['username'] = request.form['username'] return redirect(url_for('dashboard')) return render_template('login.html') @app.route('/logout')", "'targ1'}, {'key': 's2', 'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>')", "if it's there session.pop('username', None) return redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id): segments = [", "'src': 'src1', 'target': 'targ1'}, {'key': 's2', 'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html',", "the username from the session if it's there session.pop('username', None) return redirect(url_for('dashboard')) @app.route('/project/<id>')", "@app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id, lang=None): segments = [ {'key': 's1', 'src': 'src1', 'target':", "session=session, projects=projects) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] =", "@app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] return", "redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id): segments = [ {'key': 's1', 'src': 'src1', 'target': 'targ1'},", "projects=projects) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username']", "'target': 'targ2'}, ] return render_template('project.html', 
segments=segments) @app.route('/export/<project_id>') @app.route('/export/<project_id>/<lang>') def export_segments(project_id, lang=None): segments =", "dashboard(): projects = [ {'id': 'first', 'name': 'first project'} ] return render_template('dashboard.html', session=session,", "there session.pop('username', None) return redirect(url_for('dashboard')) @app.route('/project/<id>') def project(id): segments = [ {'key': 's1',", "methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] return redirect(url_for('dashboard'))", "project'} ] return render_template('dashboard.html', session=session, projects=projects) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method", "url_for from transik import app @app.route('/') def dashboard(): projects = [ {'id': 'first',", "{'key': 's2', 'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def", "app @app.route('/') def dashboard(): projects = [ {'id': 'first', 'name': 'first project'} ]", "@app.route('/') def dashboard(): projects = [ {'id': 'first', 'name': 'first project'} ] return", "'name': 'first project'} ] return render_template('dashboard.html', session=session, projects=projects) @app.route('/login', methods=['GET', 'POST']) def login():", "projects = [ {'id': 'first', 'name': 'first project'} ] return render_template('dashboard.html', session=session, projects=projects)", "'s2', 'src': 'src2', 'target': 'targ2'}, ] return render_template('project.html', segments=segments) @app.route('/import/<project_id>') @app.route('/import/<project_id>/<lang>') def import_segments(project_id,", "request, session, url_for from transik import app @app.route('/') def dashboard(): projects = [", "import flash, render_template, redirect, request, session, url_for from transik import app @app.route('/') def" ]
[ "db.execute(\"select id, owner, editor, title from posts\") as cursor: async for row in", "web router = web.RouteTableDef() async def fetch_post(db: aiosqlite.Connection, post_id: int) -> Dict[str, Any]:", "title=?, text=? where id =?\", [post[\"title\"], post[\"text\"], post_id], ) await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\")", "owner = \"Anonymous\" await db.execute( \"insert into posts (owner, editor, title, text) values", "posts where id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\") def get_db_path() -> Path: here = Path.cwd()", "= request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db,", "as conn: cur = conn.cursor() cur.execute( \"\"\"CREATE TABLE posts ( id INTEGER PRIMARY", "app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) ) return app def try_make_db() ->", "/ \"templates\")) ) return app def try_make_db() -> None: sqlite_db = get_db_path() if", "editor, title, text from posts where id = ?\", [post_id] ) as cursor:", "init_db(app: web.Application) -> AsyncIterator[None]: sqlite_db = get_db_path() db = await aiosqlite.connect(sqlite_db) db.row_factory =", "row[\"editor\"], \"title\": row[\"title\"], \"text\": row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def index(request: web.Request) ->", "new_post_apply(request: web.Request) -> Dict[str, Any]: db = request.config_dict[\"DB\"] post = await request.post() owner", "app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) ) return app def try_make_db() -> None:", "cur = conn.cursor() cur.execute( \"\"\"CREATE TABLE posts ( id INTEGER PRIMARY KEY, title", "{post_id} does not exist\") return { \"id\": post_id, \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], 
\"title\":", "async for row in cursor: ret.append( { \"id\": row[\"id\"], \"owner\": row[\"owner\"], \"editor\": row[\"editor\"],", "import sqlite3 from pathlib import Path from typing import Any, AsyncIterator, Dict import", "get_db_path() if sqlite_db.exists(): return with sqlite3.connect(sqlite_db) as conn: cur = conn.cursor() cur.execute( \"\"\"CREATE", "def init_app() -> web.Application: app = web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() /", "{ \"id\": row[\"id\"], \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], } ) return {\"posts\":", "AsyncIterator[None]: sqlite_db = get_db_path() db = await aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row app[\"DB\"] =", "async def fetch_post(db: aiosqlite.Connection, post_id: int) -> Dict[str, Any]: async with db.execute( \"select", "TABLE posts ( id INTEGER PRIMARY KEY, title TEXT, text TEXT, owner TEXT,", "exist\") return { \"id\": post_id, \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], \"text\": row[\"text\"],", "= Path.cwd() return here / \"db.sqlite3\" async def init_db(app: web.Application) -> AsyncIterator[None]: sqlite_db", "sqlite_db = get_db_path() db = await aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row app[\"DB\"] = db", "= ?\", [post_id] ) as cursor: row = await cursor.fetchone() print(row) if row", "id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\") def get_db_path() -> Path: here = Path.cwd() return here", "post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] await db.execute(\"delete from posts where id=?\", [post_id])", "ret.append( { \"id\": row[\"id\"], \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], } ) return", "= request.config_dict[\"DB\"] async with db.execute(\"select id, owner, editor, title from posts\") as cursor:", "{\"posts\": ret} 
@router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def new_post(request: web.Request) -> Dict[str, Any]: return {}", "\"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], \"text\": row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def", "row is None: raise RuntimeError(f\"Post {post_id} does not exist\") return { \"id\": post_id,", "web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) ) return app def try_make_db()", "row[\"title\"], } ) return {\"posts\": ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def new_post(request: web.Request) ->", "[owner, owner, post[\"title\"], post[\"text\"]], ) await db.commit() raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async def", "await db.commit() raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async def view_post(request: web.Request) -> Dict[str, Any]:", "db.row_factory = aiosqlite.Row app[\"DB\"] = db yield await db.close() async def init_app() ->", "import web router = web.RouteTableDef() async def fetch_post(db: aiosqlite.Connection, post_id: int) -> Dict[str,", "post = await request.post() owner = \"Anonymous\" await db.execute( \"insert into posts (owner,", "post = await request.post() await db.execute( \"update posts set title=?, text=? 
where id", "db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async def edit_post_apply(request: web.Request)", "row in cursor: ret.append( { \"id\": row[\"id\"], \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"],", "request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def edit_post(request: web.Request) ->", "aiosqlite import jinja2 from aiohttp import web router = web.RouteTableDef() async def fetch_post(db:", "aiosqlite.Connection, post_id: int) -> Dict[str, Any]: async with db.execute( \"select owner, editor, title,", "Dict[str, Any]: async with db.execute( \"select owner, editor, title, text from posts where", "def fetch_post(db: aiosqlite.Connection, post_id: int) -> Dict[str, Any]: async with db.execute( \"select owner,", "if sqlite_db.exists(): return with sqlite3.connect(sqlite_db) as conn: cur = conn.cursor() cur.execute( \"\"\"CREATE TABLE", "Any, AsyncIterator, Dict import aiohttp_jinja2 import aiosqlite import jinja2 from aiohttp import web", "async def index(request: web.Request) -> Dict[str, Any]: ret = [] db = request.config_dict[\"DB\"]", "@aiohttp_jinja2.template(\"edit.html\") async def edit_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] db =", "@aiohttp_jinja2.template(\"index.html\") async def index(request: web.Request) -> Dict[str, Any]: ret = [] db =", "raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async def view_post(request: web.Request) -> Dict[str, Any]: post_id =", "= request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async def edit_post_apply(request: web.Request) ->", "fetch_post(db: aiosqlite.Connection, post_id: int) -> Dict[str, Any]: async with db.execute( \"select owner, editor,", 
"posts\") as cursor: async for row in cursor: ret.append( { \"id\": row[\"id\"], \"owner\":", "return with sqlite3.connect(sqlite_db) as conn: cur = conn.cursor() cur.execute( \"\"\"CREATE TABLE posts (", "async def init_app() -> web.Application: app = web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd()", "print(row) if row is None: raise RuntimeError(f\"Post {post_id} does not exist\") return {", "request.match_info[\"post\"] db = request.config_dict[\"DB\"] post = await request.post() await db.execute( \"update posts set", "= await request.post() await db.execute( \"update posts set title=?, text=? where id =?\",", "db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def edit_post(request:", "-> Dict[str, Any]: ret = [] db = request.config_dict[\"DB\"] async with db.execute(\"select id,", "@router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def edit_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] db", "delete_post(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] await db.execute(\"delete from", "def new_post_apply(request: web.Request) -> Dict[str, Any]: db = request.config_dict[\"DB\"] post = await request.post()", "@router.get(\"/{post}/delete\") async def delete_post(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"]", "sqlite3 from pathlib import Path from typing import Any, AsyncIterator, Dict import aiohttp_jinja2", "post[\"text\"], post_id], ) await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def delete_post(request: web.Request) ->", "sqlite_db = get_db_path() if sqlite_db.exists(): return with sqlite3.connect(sqlite_db) as 
conn: cur = conn.cursor()", "import Path from typing import Any, AsyncIterator, Dict import aiohttp_jinja2 import aiosqlite import", "if row is None: raise RuntimeError(f\"Post {post_id} does not exist\") return { \"id\":", "def new_post(request: web.Request) -> Dict[str, Any]: return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request:", "post_id: int) -> Dict[str, Any]: async with db.execute( \"select owner, editor, title, text", "= conn.cursor() cur.execute( \"\"\"CREATE TABLE posts ( id INTEGER PRIMARY KEY, title TEXT,", "-> Dict[str, Any]: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db,", "db.execute( \"select owner, editor, title, text from posts where id = ?\", [post_id]", "row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], \"text\": row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def index(request:", "posts (owner, editor, title, text) values (?,?,?,?)\", [owner, owner, post[\"title\"], post[\"text\"]], ) await", "-> None: sqlite_db = get_db_path() if sqlite_db.exists(): return with sqlite3.connect(sqlite_db) as conn: cur", "@aiohttp_jinja2.template(\"view.html\") async def view_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] if post_id.endswith(\".ico\"):", "None: raise RuntimeError(f\"Post {post_id} does not exist\") return { \"id\": post_id, \"owner\": row[\"owner\"],", "\"Anonymous\" await db.execute( \"insert into posts (owner, editor, title, text) values (?,?,?,?)\", [owner,", "= get_db_path() db = await aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row app[\"DB\"] = db yield", "[post[\"title\"], post[\"text\"], post_id], ) await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def delete_post(request: web.Request)", "\"editor\": row[\"editor\"], \"title\": row[\"title\"], \"text\": 
row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def index(request: web.Request)", "values (?,?,?,?)\", [owner, owner, post[\"title\"], post[\"text\"]], ) await db.commit() raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\")", "[post_id]) raise web.HTTPSeeOther(location=f\"/\") def get_db_path() -> Path: here = Path.cwd() return here /", "{} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request: web.Request) -> Dict[str, Any]: db = request.config_dict[\"DB\"]", "def get_db_path() -> Path: here = Path.cwd() return here / \"db.sqlite3\" async def", "from aiohttp import web router = web.RouteTableDef() async def fetch_post(db: aiosqlite.Connection, post_id: int)", "web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async def view_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"]", "conn: cur = conn.cursor() cur.execute( \"\"\"CREATE TABLE posts ( id INTEGER PRIMARY KEY,", "Dict[str, Any]: post_id = request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"] return", "@router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def index(request: web.Request) -> Dict[str, Any]: ret = [] db", "request.post() await db.execute( \"update posts set title=?, text=? 
where id =?\", [post[\"title\"], post[\"text\"],", "await db.execute(\"delete from posts where id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\") def get_db_path() -> Path:", "web.Request) -> Dict[str, Any]: ret = [] db = request.config_dict[\"DB\"] async with db.execute(\"select", "with sqlite3.connect(sqlite_db) as conn: cur = conn.cursor() cur.execute( \"\"\"CREATE TABLE posts ( id", "get_db_path() db = await aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row app[\"DB\"] = db yield await", "request.config_dict[\"DB\"] await db.execute(\"delete from posts where id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\") def get_db_path() ->", "def init_db(app: web.Application) -> AsyncIterator[None]: sqlite_db = get_db_path() db = await aiosqlite.connect(sqlite_db) db.row_factory", "await db.execute( \"update posts set title=?, text=? where id =?\", [post[\"title\"], post[\"text\"], post_id],", "post_id], ) await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def delete_post(request: web.Request) -> web.Response:", "} @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def index(request: web.Request) -> Dict[str, Any]: ret = []", "request.config_dict[\"DB\"] post = await request.post() owner = \"Anonymous\" await db.execute( \"insert into posts", "Dict[str, Any]: ret = [] db = request.config_dict[\"DB\"] async with db.execute(\"select id, owner,", "-> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] await db.execute(\"delete from posts where", "row[\"id\"], \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], } ) return {\"posts\": ret} @router.get(\"/new\")", "def delete_post(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] await db.execute(\"delete", "for row in cursor: ret.append( { \"id\": row[\"id\"], \"owner\": row[\"owner\"], \"editor\": 
row[\"editor\"], \"title\":", "return {\"post\": await fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def edit_post(request: web.Request) -> Dict[str,", "None: sqlite_db = get_db_path() if sqlite_db.exists(): return with sqlite3.connect(sqlite_db) as conn: cur =", "@router.post(\"/{post}/edit\") async def edit_post_apply(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"]", "{\"post\": await fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async def edit_post_apply(request: web.Request) -> web.Response: post_id =", "post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\")", "posts where id = ?\", [post_id] ) as cursor: row = await cursor.fetchone()", "title, text from posts where id = ?\", [post_id] ) as cursor: row", "app = web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) ) return app", "-> Dict[str, Any]: post_id = request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"]", "post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async", "= \"Anonymous\" await db.execute( \"insert into posts (owner, editor, title, text) values (?,?,?,?)\",", "edit_post_apply(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] post = await", "raise RuntimeError(f\"Post {post_id} does not exist\") return { \"id\": post_id, \"owner\": row[\"owner\"], \"editor\":", "posts set title=?, text=? 
where id =?\", [post[\"title\"], post[\"text\"], post_id], ) await db.commit()", "Dict import aiohttp_jinja2 import aiosqlite import jinja2 from aiohttp import web router =", "db.execute(\"delete from posts where id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\") def get_db_path() -> Path: here", "async with db.execute(\"select id, owner, editor, title from posts\") as cursor: async for", "text=? where id =?\", [post[\"title\"], post[\"text\"], post_id], ) await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\")", "text from posts where id = ?\", [post_id] ) as cursor: row =", "import Any, AsyncIterator, Dict import aiohttp_jinja2 import aiosqlite import jinja2 from aiohttp import", "-> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] post = await request.post() await", "as cursor: async for row in cursor: ret.append( { \"id\": row[\"id\"], \"owner\": row[\"owner\"],", "in cursor: ret.append( { \"id\": row[\"id\"], \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], }", "db = await aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row app[\"DB\"] = db yield await db.close()", "\"update posts set title=?, text=? 
where id =?\", [post[\"title\"], post[\"text\"], post_id], ) await", "\"templates\")) ) return app def try_make_db() -> None: sqlite_db = get_db_path() if sqlite_db.exists():", "web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def", "db yield await db.close() async def init_app() -> web.Application: app = web.Application() app.add_routes(router)", "\"title\": row[\"title\"], } ) return {\"posts\": ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def new_post(request: web.Request)", "async def new_post_apply(request: web.Request) -> Dict[str, Any]: db = request.config_dict[\"DB\"] post = await", "post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def edit_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"]", "owner, editor, title, text from posts where id = ?\", [post_id] ) as", "def edit_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] return", "owner, post[\"title\"], post[\"text\"]], ) await db.commit() raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async def view_post(request:", "await request.post() await db.execute( \"update posts set title=?, text=? 
where id =?\", [post[\"title\"],", "Any]: ret = [] db = request.config_dict[\"DB\"] async with db.execute(\"select id, owner, editor,", "= aiosqlite.Row app[\"DB\"] = db yield await db.close() async def init_app() -> web.Application:", "\"\"\"CREATE TABLE posts ( id INTEGER PRIMARY KEY, title TEXT, text TEXT, owner", "app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) ) return app def try_make_db() -> None: sqlite_db =", "cursor: async for row in cursor: ret.append( { \"id\": row[\"id\"], \"owner\": row[\"owner\"], \"editor\":", "aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) ) return app def try_make_db() -> None: sqlite_db", "post_id)} @router.post(\"/{post}/edit\") async def edit_post_apply(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db =", "id INTEGER PRIMARY KEY, title TEXT, text TEXT, owner TEXT, editor TEXT) \"\"\"", "pathlib import Path from typing import Any, AsyncIterator, Dict import aiohttp_jinja2 import aiosqlite", "web.RouteTableDef() async def fetch_post(db: aiosqlite.Connection, post_id: int) -> Dict[str, Any]: async with db.execute(", "request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)}", "def view_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\")", "from posts where id = ?\", [post_id] ) as cursor: row = await", "await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def delete_post(request: web.Request) -> web.Response: post_id =", ") as cursor: row = await cursor.fetchone() print(row) if row is None: raise", "-> AsyncIterator[None]: sqlite_db = get_db_path() db = await aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row app[\"DB\"]", "\"select owner, editor, 
title, text from posts where id = ?\", [post_id] )", "web.Application: app = web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) ) return", "await cursor.fetchone() print(row) if row is None: raise RuntimeError(f\"Post {post_id} does not exist\")", "from pathlib import Path from typing import Any, AsyncIterator, Dict import aiohttp_jinja2 import", "= await aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row app[\"DB\"] = db yield await db.close() async", "row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], } ) return {\"posts\": ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async", "\"text\": row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def index(request: web.Request) -> Dict[str, Any]: ret", "return {\"post\": await fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async def edit_post_apply(request: web.Request) -> web.Response: post_id", "= request.match_info[\"post\"] db = request.config_dict[\"DB\"] post = await request.post() await db.execute( \"update posts", "ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def new_post(request: web.Request) -> Dict[str, Any]: return {} @router.post(\"/new\")", "?\", [post_id] ) as cursor: row = await cursor.fetchone() print(row) if row is", "return app def try_make_db() -> None: sqlite_db = get_db_path() if sqlite_db.exists(): return with", "id, owner, editor, title from posts\") as cursor: async for row in cursor:", "= web.RouteTableDef() async def fetch_post(db: aiosqlite.Connection, post_id: int) -> Dict[str, Any]: async with", "Any]: post_id = request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"] return {\"post\":", "db.commit() raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") 
@aiohttp_jinja2.template(\"view.html\") async def view_post(request: web.Request) -> Dict[str, Any]: post_id", "set title=?, text=? where id =?\", [post[\"title\"], post[\"text\"], post_id], ) await db.commit() raise", "aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row app[\"DB\"] = db yield await db.close() async def init_app()", "\"insert into posts (owner, editor, title, text) values (?,?,?,?)\", [owner, owner, post[\"title\"], post[\"text\"]],", "@router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def new_post(request: web.Request) -> Dict[str, Any]: return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\")", "Path.cwd() return here / \"db.sqlite3\" async def init_db(app: web.Application) -> AsyncIterator[None]: sqlite_db =", "Dict[str, Any]: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)}", "[] db = request.config_dict[\"DB\"] async with db.execute(\"select id, owner, editor, title from posts\")", "return here / \"db.sqlite3\" async def init_db(app: web.Application) -> AsyncIterator[None]: sqlite_db = get_db_path()", "raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def delete_post(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db", "post_id = request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"] return {\"post\": await", "async def delete_post(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] await", "raise web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async", "db = request.config_dict[\"DB\"] post = await request.post() await db.execute( \"update posts set title=?,", "web.Request) -> web.Response: post_id 
= request.match_info[\"post\"] db = request.config_dict[\"DB\"] await db.execute(\"delete from posts", "here = Path.cwd() return here / \"db.sqlite3\" async def init_db(app: web.Application) -> AsyncIterator[None]:", "id =?\", [post[\"title\"], post[\"text\"], post_id], ) await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def", "aiohttp import web router = web.RouteTableDef() async def fetch_post(db: aiosqlite.Connection, post_id: int) ->", "try_make_db() -> None: sqlite_db = get_db_path() if sqlite_db.exists(): return with sqlite3.connect(sqlite_db) as conn:", "app[\"DB\"] = db yield await db.close() async def init_app() -> web.Application: app =", "request.match_info[\"post\"] db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async def edit_post_apply(request:", "view_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db", "db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def delete_post(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"]", "\"db.sqlite3\" async def init_db(app: web.Application) -> AsyncIterator[None]: sqlite_db = get_db_path() db = await", "import jinja2 from aiohttp import web router = web.RouteTableDef() async def fetch_post(db: aiosqlite.Connection,", "from posts\") as cursor: async for row in cursor: ret.append( { \"id\": row[\"id\"],", "row[\"editor\"], \"title\": row[\"title\"], } ) return {\"posts\": ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def new_post(request:", "yield await db.close() async def init_app() -> web.Application: app = web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db)", "ret = [] db = request.config_dict[\"DB\"] async with db.execute(\"select id, owner, editor, title", 
"conn.cursor() cur.execute( \"\"\"CREATE TABLE posts ( id INTEGER PRIMARY KEY, title TEXT, text", "/ \"db.sqlite3\" async def init_db(app: web.Application) -> AsyncIterator[None]: sqlite_db = get_db_path() db =", "sqlite_db.exists(): return with sqlite3.connect(sqlite_db) as conn: cur = conn.cursor() cur.execute( \"\"\"CREATE TABLE posts", "\"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], } ) return {\"posts\": ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\")", "request.match_info[\"post\"] db = request.config_dict[\"DB\"] await db.execute(\"delete from posts where id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\")", "Dict[str, Any]: return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request: web.Request) -> Dict[str, Any]:", "db = request.config_dict[\"DB\"] async with db.execute(\"select id, owner, editor, title from posts\") as", "= [] db = request.config_dict[\"DB\"] async with db.execute(\"select id, owner, editor, title from", "loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) ) return app def try_make_db() -> None: sqlite_db = get_db_path()", "post[\"title\"], post[\"text\"]], ) await db.commit() raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async def view_post(request: web.Request)", "async def view_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise", "def try_make_db() -> None: sqlite_db = get_db_path() if sqlite_db.exists(): return with sqlite3.connect(sqlite_db) as", ") return {\"posts\": ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def new_post(request: web.Request) -> Dict[str, Any]:", "index(request: web.Request) -> Dict[str, Any]: ret = [] db = request.config_dict[\"DB\"] async with", "@aiohttp_jinja2.template(\"new.html\") async def new_post(request: web.Request) -> Dict[str, 
Any]: return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async", "Dict[str, Any]: db = request.config_dict[\"DB\"] post = await request.post() owner = \"Anonymous\" await", "aiosqlite.Row app[\"DB\"] = db yield await db.close() async def init_app() -> web.Application: app", "return { \"id\": post_id, \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], \"text\": row[\"text\"], }", "cur.execute( \"\"\"CREATE TABLE posts ( id INTEGER PRIMARY KEY, title TEXT, text TEXT,", "async def new_post(request: web.Request) -> Dict[str, Any]: return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def", "request.config_dict[\"DB\"] post = await request.post() await db.execute( \"update posts set title=?, text=? where", "where id = ?\", [post_id] ) as cursor: row = await cursor.fetchone() print(row)", "= request.match_info[\"post\"] db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async def", "title TEXT, text TEXT, owner TEXT, editor TEXT) \"\"\" ) conn.commit() try_make_db() web.run_app(init_app())", "here / \"db.sqlite3\" async def init_db(app: web.Application) -> AsyncIterator[None]: sqlite_db = get_db_path() db", "import aiohttp_jinja2 import aiosqlite import jinja2 from aiohttp import web router = web.RouteTableDef()", "where id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\") def get_db_path() -> Path: here = Path.cwd() return", "await request.post() owner = \"Anonymous\" await db.execute( \"insert into posts (owner, editor, title,", "int) -> Dict[str, Any]: async with db.execute( \"select owner, editor, title, text from", "db.execute( \"update posts set title=?, text=? 
where id =?\", [post[\"title\"], post[\"text\"], post_id], )", "app def try_make_db() -> None: sqlite_db = get_db_path() if sqlite_db.exists(): return with sqlite3.connect(sqlite_db)", "PRIMARY KEY, title TEXT, text TEXT, owner TEXT, editor TEXT) \"\"\" ) conn.commit()", "sqlite3.connect(sqlite_db) as conn: cur = conn.cursor() cur.execute( \"\"\"CREATE TABLE posts ( id INTEGER", "row[\"title\"], \"text\": row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def index(request: web.Request) -> Dict[str, Any]:", "is None: raise RuntimeError(f\"Post {post_id} does not exist\") return { \"id\": post_id, \"owner\":", "Any]: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.post(\"/{post}/edit\")", "-> Path: here = Path.cwd() return here / \"db.sqlite3\" async def init_db(app: web.Application)", "= request.config_dict[\"DB\"] post = await request.post() await db.execute( \"update posts set title=?, text=?", "web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] post = await request.post() await db.execute(", "-> Dict[str, Any]: db = request.config_dict[\"DB\"] post = await request.post() owner = \"Anonymous\"", "= request.config_dict[\"DB\"] post = await request.post() owner = \"Anonymous\" await db.execute( \"insert into", "= request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def edit_post(request: web.Request)", "INTEGER PRIMARY KEY, title TEXT, text TEXT, owner TEXT, editor TEXT) \"\"\" )", "web.Request) -> Dict[str, Any]: return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request: web.Request) ->", "typing import Any, AsyncIterator, Dict import aiohttp_jinja2 import aiosqlite import jinja2 from aiohttp", ") return app def try_make_db() -> None: sqlite_db = get_db_path() if 
sqlite_db.exists(): return", "cursor.fetchone() print(row) if row is None: raise RuntimeError(f\"Post {post_id} does not exist\") return", "import aiosqlite import jinja2 from aiohttp import web router = web.RouteTableDef() async def", "return {\"posts\": ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def new_post(request: web.Request) -> Dict[str, Any]: return", "db = request.config_dict[\"DB\"] post = await request.post() owner = \"Anonymous\" await db.execute( \"insert", "web.HTTPSeeOther(location=f\"/\") def get_db_path() -> Path: here = Path.cwd() return here / \"db.sqlite3\" async", "\"id\": post_id, \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], \"text\": row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\")", "if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db = request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.get(\"/{post}/edit\")", "web.Request) -> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] post = await request.post()", "await db.execute( \"insert into posts (owner, editor, title, text) values (?,?,?,?)\", [owner, owner,", "from posts where id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\") def get_db_path() -> Path: here =", "await db.close() async def init_app() -> web.Application: app = web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup(", "request.config_dict[\"DB\"] return {\"post\": await fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async def edit_post_apply(request: web.Request) -> web.Response:", "web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] await db.execute(\"delete from posts where id=?\",", "Any]: async with db.execute( \"select owner, editor, title, text from posts where id", "post[\"text\"]], ) await db.commit() raise web.HTTPSeeOther(location=f\"/\") 
@router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async def view_post(request: web.Request) ->", "[post_id] ) as cursor: row = await cursor.fetchone() print(row) if row is None:", "-> web.Application: app = web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) )", "posts ( id INTEGER PRIMARY KEY, title TEXT, text TEXT, owner TEXT, editor", "= await cursor.fetchone() print(row) if row is None: raise RuntimeError(f\"Post {post_id} does not", "aiohttp_jinja2 import aiosqlite import jinja2 from aiohttp import web router = web.RouteTableDef() async", "row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def index(request: web.Request) -> Dict[str, Any]: ret =", "= request.match_info[\"post\"] db = request.config_dict[\"DB\"] await db.execute(\"delete from posts where id=?\", [post_id]) raise", "title, text) values (?,?,?,?)\", [owner, owner, post[\"title\"], post[\"text\"]], ) await db.commit() raise web.HTTPSeeOther(location=f\"/\")", "Path from typing import Any, AsyncIterator, Dict import aiohttp_jinja2 import aiosqlite import jinja2", "def index(request: web.Request) -> Dict[str, Any]: ret = [] db = request.config_dict[\"DB\"] async", "@router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async def view_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] if", "= request.config_dict[\"DB\"] await db.execute(\"delete from posts where id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\") def get_db_path()", "= web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\")) ) return app def", "from typing import Any, AsyncIterator, Dict import aiohttp_jinja2 import aiosqlite import jinja2 from", "cursor: ret.append( { \"id\": row[\"id\"], \"owner\": row[\"owner\"], 
\"editor\": row[\"editor\"], \"title\": row[\"title\"], } )", "await aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row app[\"DB\"] = db yield await db.close() async def", "( id INTEGER PRIMARY KEY, title TEXT, text TEXT, owner TEXT, editor TEXT)", "(owner, editor, title, text) values (?,?,?,?)\", [owner, owner, post[\"title\"], post[\"text\"]], ) await db.commit()", "does not exist\") return { \"id\": post_id, \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"],", "-> Dict[str, Any]: async with db.execute( \"select owner, editor, title, text from posts", "async def edit_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"]", "db = request.config_dict[\"DB\"] await db.execute(\"delete from posts where id=?\", [post_id]) raise web.HTTPSeeOther(location=f\"/\") def", "web.Request) -> Dict[str, Any]: db = request.config_dict[\"DB\"] post = await request.post() owner =", "(?,?,?,?)\", [owner, owner, post[\"title\"], post[\"text\"]], ) await db.commit() raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async", "init_app() -> web.Application: app = web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app, loader=jinja2.FileSystemLoader(str(Path.cwd() / \"templates\"))", "web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] if post_id.endswith(\".ico\"): raise web.HTTPSeeOther(location=f\"/\") db =", "AsyncIterator, Dict import aiohttp_jinja2 import aiosqlite import jinja2 from aiohttp import web router", "text) values (?,?,?,?)\", [owner, owner, post[\"title\"], post[\"text\"]], ) await db.commit() raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\")", "} ) return {\"posts\": ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def new_post(request: web.Request) -> Dict[str,", "db.execute( \"insert into posts (owner, editor, 
title, text) values (?,?,?,?)\", [owner, owner, post[\"title\"],", "not exist\") return { \"id\": post_id, \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], \"text\":", "title from posts\") as cursor: async for row in cursor: ret.append( { \"id\":", "web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def delete_post(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db =", "cursor: row = await cursor.fetchone() print(row) if row is None: raise RuntimeError(f\"Post {post_id}", "Any]: return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request: web.Request) -> Dict[str, Any]: db", "jinja2 from aiohttp import web router = web.RouteTableDef() async def fetch_post(db: aiosqlite.Connection, post_id:", "await fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async def edit_post_apply(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"]", "= get_db_path() if sqlite_db.exists(): return with sqlite3.connect(sqlite_db) as conn: cur = conn.cursor() cur.execute(", "router = web.RouteTableDef() async def fetch_post(db: aiosqlite.Connection, post_id: int) -> Dict[str, Any]: async", "async with db.execute( \"select owner, editor, title, text from posts where id =", "post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] post = await request.post() await db.execute( \"update", "with db.execute( \"select owner, editor, title, text from posts where id = ?\",", "with db.execute(\"select id, owner, editor, title from posts\") as cursor: async for row", "id = ?\", [post_id] ) as cursor: row = await cursor.fetchone() print(row) if", "web.Application) -> AsyncIterator[None]: sqlite_db = get_db_path() db = await aiosqlite.connect(sqlite_db) db.row_factory = aiosqlite.Row", "= await request.post() owner = \"Anonymous\" await db.execute( \"insert into posts (owner, editor,", "db.close() async def init_app() -> 
web.Application: app = web.Application() app.add_routes(router) app.cleanup_ctx.append(init_db) aiohttp_jinja2.setup( app,", "fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def edit_post(request: web.Request) -> Dict[str, Any]: post_id =", "request.post() owner = \"Anonymous\" await db.execute( \"insert into posts (owner, editor, title, text)", "get_db_path() -> Path: here = Path.cwd() return here / \"db.sqlite3\" async def init_db(app:", "\"editor\": row[\"editor\"], \"title\": row[\"title\"], } ) return {\"posts\": ret} @router.get(\"/new\") @aiohttp_jinja2.template(\"new.html\") async def", "KEY, title TEXT, text TEXT, owner TEXT, editor TEXT) \"\"\" ) conn.commit() try_make_db()", "as cursor: row = await cursor.fetchone() print(row) if row is None: raise RuntimeError(f\"Post", "where id =?\", [post[\"title\"], post[\"text\"], post_id], ) await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async", "RuntimeError(f\"Post {post_id} does not exist\") return { \"id\": post_id, \"owner\": row[\"owner\"], \"editor\": row[\"editor\"],", "{ \"id\": post_id, \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], \"text\": row[\"text\"], } @router.get(\"/\")", "request.config_dict[\"DB\"] async with db.execute(\"select id, owner, editor, title from posts\") as cursor: async", "fetch_post(db, post_id)} @router.post(\"/{post}/edit\") async def edit_post_apply(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db", "web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] return {\"post\": await", "await fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def edit_post(request: web.Request) -> Dict[str, Any]: post_id", "def edit_post_apply(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db = 
request.config_dict[\"DB\"] post =", "into posts (owner, editor, title, text) values (?,?,?,?)\", [owner, owner, post[\"title\"], post[\"text\"]], )", "async def init_db(app: web.Application) -> AsyncIterator[None]: sqlite_db = get_db_path() db = await aiosqlite.connect(sqlite_db)", "edit_post(request: web.Request) -> Dict[str, Any]: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] return {\"post\":", "=?\", [post[\"title\"], post[\"text\"], post_id], ) await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def delete_post(request:", "return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request: web.Request) -> Dict[str, Any]: db =", "Any]: db = request.config_dict[\"DB\"] post = await request.post() owner = \"Anonymous\" await db.execute(", "Path: here = Path.cwd() return here / \"db.sqlite3\" async def init_db(app: web.Application) ->", "@aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request: web.Request) -> Dict[str, Any]: db = request.config_dict[\"DB\"] post =", "= db yield await db.close() async def init_app() -> web.Application: app = web.Application()", "post_id, \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], \"text\": row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async", "-> Dict[str, Any]: return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request: web.Request) -> Dict[str,", "owner, editor, title from posts\") as cursor: async for row in cursor: ret.append(", "\"id\": row[\"id\"], \"owner\": row[\"owner\"], \"editor\": row[\"editor\"], \"title\": row[\"title\"], } ) return {\"posts\": ret}", ") await db.commit() raise web.HTTPSeeOther(location=f\"/\") @router.get(\"/{post}\") @aiohttp_jinja2.template(\"view.html\") async def view_post(request: web.Request) -> Dict[str,", "\"title\": row[\"title\"], \"text\": 
row[\"text\"], } @router.get(\"/\") @aiohttp_jinja2.template(\"index.html\") async def index(request: web.Request) -> Dict[str,", "new_post(request: web.Request) -> Dict[str, Any]: return {} @router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request: web.Request)", "{\"post\": await fetch_post(db, post_id)} @router.get(\"/{post}/edit\") @aiohttp_jinja2.template(\"edit.html\") async def edit_post(request: web.Request) -> Dict[str, Any]:", "@router.post(\"/new\") @aiohttp_jinja2.template(\"edit.html\") async def new_post_apply(request: web.Request) -> Dict[str, Any]: db = request.config_dict[\"DB\"] post", "editor, title, text) values (?,?,?,?)\", [owner, owner, post[\"title\"], post[\"text\"]], ) await db.commit() raise", "editor, title from posts\") as cursor: async for row in cursor: ret.append( {", "async def edit_post_apply(request: web.Request) -> web.Response: post_id = request.match_info[\"post\"] db = request.config_dict[\"DB\"] post", ") await db.commit() raise web.HTTPSeeOther(location=f\"/{post_id}\") @router.get(\"/{post}/delete\") async def delete_post(request: web.Request) -> web.Response: post_id", "row = await cursor.fetchone() print(row) if row is None: raise RuntimeError(f\"Post {post_id} does", "raise web.HTTPSeeOther(location=f\"/\") def get_db_path() -> Path: here = Path.cwd() return here / \"db.sqlite3\"" ]
[ "string: the string representation of the point. ''' def __str__(self): return \"point: \"", "of the pheromones of the list of points. Return: - float: the total", "of points. Return: - list: the the list of points. ''' def get_list_points(self):", "def get_point(self): return self.point ''' Method to get the pheromone of the point.", "- interval: an interval to draw number from. - number_ants: The number of", "higher pheromone. Return: - Point: the point with the higher pheromone trail. '''", "string representation of the bat. ''' def __str__(self): memory = \"\" for point", "False self.memory.append(point) if( len(self.memory) > self.memory_limit ): del self.memory[0] return True ''' Method", "''' class ACO(): ''' The constructor of the class. Params: - num_params: the", "''' Class ACO. Class to run the ant colony optimization with respect of", "ant, cost = self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost < best_cost): best_location = ant.get_location() best_ant", "respect to the cost function. Return: - Ant: the best ant in the", "heuristic over the objective function. Params: - fx: the cost function. Return: -list:", "the higher pheromone. Return: - Point: the point with the higher pheromone trail.", "def __init__(self, num_params, discrete_points, interval, number_ants, q, evaporation_rate, num_iterations = 50) -> None:", "in point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point) if (ant_asigned): ant.assign_point(point) break '''", "point in self.current_localization: output_list.append(point.get_point()) return output_list ''' Method to update the position of", "Point(): ''' The constructor of the class. Params: - point: a coordinate. 
-", "> (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point) if (ant_asigned): ant.assign_point(point) break ''' Method to run", "Method that does a local search around the current position of an ant.", "ant.update_pheromone(cost) for point_list in self.points: point_list.evaporate_pheromone(self.p) ''' Method in which the ants in", "[Point(uniform(0, pi),1/2) for _ in range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2) for _ in range(0,int(n/2))]", "update the position of the ant. Params: - new_location: a list that contains", "point of the ant. Params: - error: The error induced by the best", "visited placed that an ant can remember. ''' def __init__(self, memory_limit) -> None:", "of the class. Params: - point: a coordinate. - pheromone: the pheromone that", "- new_location: a list that contains the coordinates of the new location. '''", "class. Params: - point: a coordinate. - pheromone: the pheromone that leads to", "a coordinate. - pheromone: the pheromone that leads to the point. ''' def", "to the point. ''' class Point(): ''' The constructor of the class. Params:", "set_memory(self, point): for p in self.memory: if(point.get_point() == p.get_point()): return False self.memory.append(point) if(", "and return it's location. ''' class Ant(): ''' The constructor of the class.", "list: the list of coordinates of the ant position. ''' def get_location(self): output_list", "''' def update_location(self, new_location): for i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method that adds", "best ant. ''' def get_best_ant(self, function): best_ant = self.ants[0] cost = function(best_ant.get_location()) for", "set_pheromone(self, pheromone): self.pheromone = pheromone ''' Method to set a coordinate of the", "- float: the cost of the best ant. ''' def get_best_ant(self, function): best_ant", "coordinates of the ant position. 
''' def get_location(self): output_list = list() for point", "sample. - interval: an interval to draw number from. - number_ants: The number", "cost of the best point found by the colony. ''' def run(self,fx): self.probabilistic_construction()", "- True: if the point was added to the memory and False otherwise.", "solution in the colony. ''' def update_pheromone(self, error): for point in self.current_localization: point.set_pheromone(point.get_pheromone()", "self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that returns the best ant and it's cost with respect", "update_pheromone(self, ant, cost): ant.update_pheromone(cost) for point_list in self.points: point_list.evaporate_pheromone(self.p) ''' Method in which", "run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant, best_cost = self.get_best_ant(fx) best_location = best_ant.get_location() self.update_pheromone(best_ant, best_cost) for", "list: the the list of points. ''' def get_list_points(self): return self.points ''' Method", "the point. ''' def get_point(self): return self.point ''' Method to get the pheromone", "updates the pheromone of the ants in the colony. ''' def update_pheromone(self, ant,", "Class Point. A point is an object that has a position and a", "a local search around the current position of an ant. ''' def local_search(self,", "self.number_params = num_params self.num_iterations = num_iterations self.discrete_points = discrete_points self.points = list() self.q", "that returns the best ant and it's cost with respect to the cost", "''' Method that updates the pheromone of the ants in the colony. '''", "Method that updates the pheromone of the ants in the colony. ''' def", "in self.points: for point in point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point) if", "the point. ''' class Point(): ''' The constructor of the class. 
Params: -", "in self.points: if(point.get_pheromone() > best_point.get_pheromone()): best_point = point return best_point ''' Method that", "the colony. ''' def run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant, best_cost = self.get_best_ant(fx) best_location =", "def __init__(self, list_of_points) -> None: self.points = list_of_points ''' Method that returns the", "ant in the colony. - float: the cost of the best ant. '''", "The error induced by the best solution in the colony. ''' def update_pheromone(self,", "cost ''' Method that does a local search around the current position of", "list that contains the location of the ant. ''' def assign_point(self, point): self.current_localization.append(point)", "the colony decides to move to a location based on the pheromone trail", "def set_memory(self, point): for p in self.memory: if(point.get_point() == p.get_point()): return False self.memory.append(point)", "of points. ''' def __init__(self, list_of_points) -> None: self.points = list_of_points ''' Method", "point in self.points: total += point.get_pheromone() return total ''' Method that returns the", "point): self.point = point ''' Method that returns the string representation of the", "string representation of the point. Return: - string: the string representation of the", "that returns the list of points. Return: - list: the the list of", "num_params self.num_iterations = num_iterations self.discrete_points = discrete_points self.points = list() self.q = q", "Method that returns the point object that has the higher pheromone. 
Return: -", "def assign_point(self, point): self.current_localization.append(point) ''' Method that updates the pheromone of the current", "def set_pheromone(self, pheromone): self.pheromone = pheromone ''' Method to set a coordinate of", "self.get_best_ant(fx) best_location = best_ant.get_location() self.update_pheromone(best_ant, best_cost) for i in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant,", "of the point. ''' def __str__(self): return \"point: \" + str(self.point) + \",\"", "the point. ''' def get_pheromone(self): return self.pheromone ''' Method to set the pheromone", "self.current_localization: output_list.append(point.get_point()) return output_list ''' Method to update the position of the ant.", "from math import e, sqrt,cos,pi ''' Class Point. A point is an object", "def __str__(self): return \"point: \" + str(self.point) + \",\" + \"pheromone: \" +", "self.points: point_list.evaporate_pheromone(self.p) ''' Method in which the ants in the colony decides to", "best solution in the colony. ''' def update_pheromone(self, error): for point in self.current_localization:", "in self.points: point_list.evaporate_pheromone(self.p) ''' Method in which the ants in the colony decides", "of the point. Return: - string: the string representation of the point. '''", "clear the ant location. ''' def clear_location(self): self.current_localization = list() ''' Method to", "''' def get_list_points(self): return self.points ''' Method that evaporates the pheromones in the", "is an object that has a position, a memory and a limit for", "''' def set_point(self, point): self.point = point ''' Method that returns the string", "the point. Params: - point: The coordinate of the point. ''' def set_point(self,", "- list: the the list of points. ''' def get_list_points(self): return self.points '''", "in the colony. 
''' def update_pheromone(self, error): for point in self.current_localization: point.set_pheromone(point.get_pheromone() +", "- Ant: the best ant in the colony. - float: the cost of", "found by the colony. ''' def run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant, best_cost = self.get_best_ant(fx)", "save a new location in the ant memory. Params: - point: the point", "location ''' Class PointList. A list that contains points. ''' class PointsList(): '''", "output_list = list() for point in self.current_localization: output_list.append(point.get_point()) return output_list ''' Method to", "__init__(self, list_of_points) -> None: self.points = list_of_points ''' Method that returns the point", "an ant can remember. ''' def __init__(self, memory_limit) -> None: self.memory = list()", "the pheromone of the current location point of the ant. Params: - error:", "move to a location based on the pheromone trail or on a probabilistic", "+ \"pheromone: \" + str(self.pheromone) ''' Class Ant. An ant is an object", "evaporation_rate self.ants = [Ant(num_params) for _ in range(0, number_ants)] for _ in range(0,self.number_params):", "return self.pheromone ''' Method to set the pheromone of the point. Params: -", "of the bat. Return: - string: the string representation of the bat. '''", "colony. - q: A constant. - evaporation_rate: A constant to control the evaporation", "import random, uniform from scipy.optimize import minimize from math import e, sqrt,cos,pi '''", "local_search(self, function): for ant in self.ants: res = minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x)", "in self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO. Class to run the ant colony optimization", "the maximum number of previous visited placed that an ant can remember. '''", "number of ants of the colony. - q: A constant. 
- evaporation_rate: A", "_ in range(0, number_ants)] for _ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that returns", "a coordinate of the point. Params: - point: The coordinate of the point.", "ant.assign_point(point_list.get_best_point()) else: for point_list in self.points: for point in point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()):", "given function. ''' class ACO(): ''' The constructor of the class. Params: -", "position and a pheromone that leads to the point. ''' class Point(): '''", "location. ''' def clear_location(self): self.current_localization = list() ''' Method to get the coordinates", "was added to the memory and False otherwise. ''' def set_memory(self, point): for", "''' def get_point(self): return self.point ''' Method to get the pheromone of the", "Return: - float: the total pf pheromones. ''' def get_total_pheromones(self): total = 0", "in self.points: total += point.get_pheromone() return total ''' Method that returns the list", "the list of points. ''' def get_list_points(self): return self.points ''' Method that evaporates", "trail or on a probabilistic desition. ''' def probabilistic_construction(self): for ant in self.ants:", "\" \" location = \"\" for point in self.current_localization: location += \" \"", "of dimentios of the objective function. - discrete_points: the number of discrete points", "''' Class Point. A point is an object that has a position and", "cost = function(best_ant.get_location()) for ant in self.ants: ant_cost = (function(ant.get_location())) if(ant_cost < cost):", "the point. ''' def __str__(self): return \"point: \" + str(self.point) + \",\" +", "- point: The pheromone of the point. ''' def get_pheromone(self): return self.pheromone '''", "saved in the ant memory Return: - True: if the point was added", "\"pheromone: \" + str(self.pheromone) ''' Class Ant. 
An ant is an object that", "_ in range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2) for _ in range(0,int(n/2))] return (theta) self.number_params", "''' The constructor of the class. Params: - memory_limit: the maximum number of", "\"\" for point in self.memory: memory += \" \" + str(point) + \"", "evaporation of the pheromone. - num_iterations (optional): The number of iterations of the", "num_iterations = 50) -> None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for _ in range(discrete_points)]", "''' Method that returns the string representation of the bat. Return: - string:", "to update the position of the ant. Params: - new_location: a list that", "the class. Params: - point: a coordinate. - pheromone: the pheromone that leads", "of the point. ''' def get_pheromone(self): return self.pheromone ''' Method to set the", "''' Method to get the coordinates of the ant location. Return: - list:", "contains the location of the ant. ''' def assign_point(self, point): self.current_localization.append(point) ''' Method", "The constructor of the class. Params: - point: a coordinate. - pheromone: the", "return \"memory: \" + memory + \" and \" + \"current location\" +", "location = \"\" for point in self.current_localization: location += \" \" + str(point)", "Method that returns the sum of the pheromones of the list of points.", "''' def clear_location(self): self.current_localization = list() ''' Method to get the coordinates of", "point. ''' def get_point(self): return self.point ''' Method to get the pheromone of", "Ant. 
An ant is an object that has a position, a memory and", "''' def get_total_pheromones(self): total = 0 for point in self.points: total += point.get_pheromone()", "def update_pheromone(self, error): for point in self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error)) ''' Method to", "point: the point that will be saved in the ant memory Return: -", "if(point.get_point() == p.get_point()): return False self.memory.append(point) if( len(self.memory) > self.memory_limit ): del self.memory[0]", "pheromone of the point. Params: - pheromone: The pheromone of the point. '''", "the total pf pheromones. ''' def get_total_pheromones(self): total = 0 for point in", "be saved in the ant memory Return: - True: if the point was", "+= \" \" + str(point) + \" \" location = \"\" for point", "\" + \"current location\" + location ''' Class PointList. A list that contains", "in range(0, number_ants)] for _ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that returns the", "''' def update_pheromone(self, error): for point in self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error)) ''' Method", "True ''' Method that returns the string representation of the bat. Return: -", "ant and it's cost with respect to the cost function. Return: - Ant:", "memory Return: - True: if the point was added to the memory and", "the colony. ''' def update_pheromone(self, error): for point in self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error))", "coordinates of the point. Return: - point: The coordinates of the point. '''", "def __init__(self, point, pheromone) -> None: self.point = point self.pheromone = pheromone '''", "the evaporation of the pheromone. 
- num_iterations (optional): The number of iterations of", "ants in the colony decides to move to a location based on the", "self.probabilistic_construction() self.local_search(fx) best_ant, best_cost = self.get_best_ant(fx) best_location = best_ant.get_location() self.update_pheromone(best_ant, best_cost) for i", "uniform from scipy.optimize import minimize from math import e, sqrt,cos,pi ''' Class Point.", "point): for p in self.memory: if(point.get_point() == p.get_point()): return False self.memory.append(point) if( len(self.memory)", "if( len(self.memory) > self.memory_limit ): del self.memory[0] return True ''' Method that returns", "None: self.point = point self.pheromone = pheromone ''' Method to get the coordinates", "of points. Return: - float: the total pf pheromones. ''' def get_total_pheromones(self): total", "number of iterations of the algorithm. ''' def __init__(self, num_params, discrete_points, interval, number_ants,", "for point_list in self.points: ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for point_list in self.points:", "self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO. Class to run the ant colony optimization with", "object that has a position and a pheromone that leads to the point.", "for point in self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO. Class to run the ant", "point return best_point ''' Method that returns the sum of the pheromones of", "self.local_search(fx) best_ant, best_cost = self.get_best_ant(fx) best_location = best_ant.get_location() self.update_pheromone(best_ant, best_cost) for i in", "the algorithm. ''' def __init__(self, num_params, discrete_points, interval, number_ants, q, evaporation_rate, num_iterations =", "Class to run the ant colony optimization with respect of the given function.", "point. Return: - point: The pheromone of the point. 
''' def get_pheromone(self): return", "Method to clear the ant location. ''' def clear_location(self): self.current_localization = list() '''", "self.ants: res = minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) ''' Method that updates the", "Return: - point: The coordinates of the point. ''' def get_point(self): return self.point", "visited places and return it's location. ''' class Ant(): ''' The constructor of", "the string representation of the bat. Return: - string: the string representation of", "ant. ''' def get_best_ant(self, function): best_ant = self.ants[0] cost = function(best_ant.get_location()) for ant", "''' def local_search(self, function): for ant in self.ants: res = minimize(function, ant.get_location(), method='COBYLA',", "return self.point ''' Method to get the pheromone of the point. Return: -", "of discrete points to sample. - interval: an interval to draw number from.", "that leads to the point. ''' class Point(): ''' The constructor of the", "contains the coordinates of the new location. ''' def update_location(self, new_location): for i", "ant position. ''' def get_location(self): output_list = list() for point in self.current_localization: output_list.append(point.get_point())", "= [Ant(num_params) for _ in range(0, number_ants)] for _ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) '''", "''' Method that returns the sum of the pheromones of the list of", "otherwise. ''' def set_memory(self, point): for p in self.memory: if(point.get_point() == p.get_point()): return", "run the ant colony optimization with respect of the given function. 
''' class", "\" + str(point) + \" \" return \"memory: \" + memory + \"", "self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost < best_cost): best_location = ant.get_location() best_ant = ant best_cost", "get_list_points(self): return self.points ''' Method that evaporates the pheromones in the points. '''", "range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method that adds a point to the list that contains", "current location point of the ant. Params: - error: The error induced by", "ant.clear_location() if(random() > 1 - self.q): for point_list in self.points: ant_asigned = ant.set_memory(point_list.get_best_point())", "constant. - evaporation_rate: A constant to control the evaporation of the pheromone. -", "else: for point_list in self.points: for point in point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned", "leads to the point. ''' class Point(): ''' The constructor of the class.", "get_total_pheromones(self): total = 0 for point in self.points: total += point.get_pheromone() return total", "maximum number of previous visited placed that an ant can remember. ''' def", "to set a coordinate of the point. Params: - point: The coordinate of", "list() for point in self.current_localization: output_list.append(point.get_point()) return output_list ''' Method to update the", "A constant to control the evaporation of the pheromone. - num_iterations (optional): The", "assign_point(self, point): self.current_localization.append(point) ''' Method that updates the pheromone of the current location", "best_cost = self.get_best_ant(fx) best_location = best_ant.get_location() self.update_pheromone(best_ant, best_cost) for i in range(self.num_iterations): self.probabilistic_construction()", "of the ant. Params: - error: The error induced by the best solution", "that leads to the point. 
''' def __init__(self, point, pheromone) -> None: self.point", "''' Method to get the pheromone of the point. Return: - point: The", "the point. Params: - pheromone: The pheromone of the point. ''' def set_pheromone(self,", "- point: the point that will be saved in the ant memory Return:", "\" location = \"\" for point in self.current_localization: location += \" \" +", "point that will be saved in the ant memory Return: - True: if", "\" + memory + \" and \" + \"current location\" + location '''", "the list of points. Return: - float: the total pf pheromones. ''' def", "if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point) if (ant_asigned): ant.assign_point(point) break ''' Method to", "PSO heuristic over the objective function. Params: - fx: the cost function. Return:", "+ [Point(uniform(0, 2*pi),1/2) for _ in range(0,int(n/2))] return (theta) self.number_params = num_params self.num_iterations", "error: The error induced by the best solution in the colony. ''' def", "= self.get_best_ant(fx) best_location = best_ant.get_location() self.update_pheromone(best_ant, best_cost) for i in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx)", "self.num_iterations = num_iterations self.discrete_points = discrete_points self.points = list() self.q = q self.p", "the location of the ant. ''' def assign_point(self, point): self.current_localization.append(point) ''' Method that", "with the best point find by the colony. -float: the cost of the", "scipy.optimize import minimize from math import e, sqrt,cos,pi ''' Class Point. A point", "to the memory and False otherwise. ''' def set_memory(self, point): for p in", "return it's location. ''' class Ant(): ''' The constructor of the class. Params:", "placed that an ant can remember. ''' def __init__(self, memory_limit) -> None: self.memory", "ant colony optimization with respect of the given function. 
''' class ACO(): '''", "self.points: for point in point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point) if (ant_asigned):", "returns the point object that has the higher pheromone. Return: - Point: the", "for point in point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point) if (ant_asigned): ant.assign_point(point)", "of the point. Return: - point: The coordinates of the point. ''' def", "pheromone: The pheromone of the point. ''' def set_pheromone(self, pheromone): self.pheromone = pheromone", "point find by the colony. -float: the cost of the best point found", "list of points. ''' def __init__(self, list_of_points) -> None: self.points = list_of_points '''", "__init__(self, point, pheromone) -> None: self.point = point self.pheromone = pheromone ''' Method", "range(0, number_ants)] for _ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that returns the best", "the pheromones in the points. ''' def evaporate_pheromone(self, p): for point in self.points:", "= 50) -> None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for _ in range(discrete_points)] theta", "from. - number_ants: The number of ants of the colony. - q: A", "random, uniform from scipy.optimize import minimize from math import e, sqrt,cos,pi ''' Class", "in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that returns the best ant and it's cost", "around the current position of an ant. ''' def local_search(self, function): for ant", "based on the pheromone trail or on a probabilistic desition. ''' def probabilistic_construction(self):", "returns the list of points. Return: - list: the the list of points.", "coordinates of the new location. 
''' def update_location(self, new_location): for i in range(len(self.current_localization)):", "the current position of an ant. ''' def local_search(self, function): for ant in", "point. ''' def set_pheromone(self, pheromone): self.pheromone = pheromone ''' Method to set a", "(function(ant.get_location())) if(ant_cost < cost): cost = ant_cost best_ant = ant return best_ant, cost", "random import random, uniform from scipy.optimize import minimize from math import e, sqrt,cos,pi", "self.points: if(point.get_pheromone() > best_point.get_pheromone()): best_point = point return best_point ''' Method that returns", "Class ACO. Class to run the ant colony optimization with respect of the", "Return: - point: The pheromone of the point. ''' def get_pheromone(self): return self.pheromone", "Ant: the best ant in the colony. - float: the cost of the", "Return: - string: the string representation of the point. ''' def __str__(self): return", "- list_of_points: the list of points. ''' def __init__(self, list_of_points) -> None: self.points", "best_ant = ant return best_ant, cost ''' Method that does a local search", "colony. -float: the cost of the best point found by the colony. '''", "in the colony. - float: the cost of the best ant. ''' def", "+= \" \" + str(point) + \" \" return \"memory: \" + memory", "pheromone): self.pheromone = pheromone ''' Method to set a coordinate of the point.", "position of the ant. 
Params: - new_location: a list that contains the coordinates", "point in point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point) if (ant_asigned): ant.assign_point(point) break", "''' Method that returns the best ant and it's cost with respect to", "discrete_points self.points = list() self.q = q self.p = evaporation_rate self.ants = [Ant(num_params)", "in the ant memory Return: - True: if the point was added to", "the point that will be saved in the ant memory Return: - True:", "def local_search(self, function): for ant in self.ants: res = minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5})", "Method that evaporates the pheromones in the points. ''' def evaporate_pheromone(self, p): for", "point in self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error)) ''' Method to save a new location", "= list_of_points ''' Method that returns the point object that has the higher", "- memory_limit: the maximum number of previous visited placed that an ant can", "''' Method that adds a point to the list that contains the location", "= ant.set_memory(point) if (ant_asigned): ant.assign_point(point) break ''' Method to run the PSO heuristic", "import minimize from math import e, sqrt,cos,pi ''' Class Point. A point is", "Class Ant. An ant is an object that has a position, a memory", "in the colony. ''' def update_pheromone(self, ant, cost): ant.update_pheromone(cost) for point_list in self.points:", "that returns the point object that has the higher pheromone. Return: - Point:", "= \"\" for point in self.current_localization: location += \" \" + str(point) +", "of the class. Params: - memory_limit: the maximum number of previous visited placed", "- fx: the cost function. Return: -list: a list with the best point", "= pheromone ''' Method to set a coordinate of the point. Params: -", "the memory and False otherwise. 
''' def set_memory(self, point): for p in self.memory:", "new location in the ant memory. Params: - point: the point that will", "point.set_pheromone(point.get_pheromone() + (1/error)) ''' Method to save a new location in the ant", "that updates the pheromone of the ants in the colony. ''' def update_pheromone(self,", "pheromone ''' Method to get the coordinates of the point. Return: - point:", "a location based on the pheromone trail or on a probabilistic desition. '''", "returns the string representation of the point. Return: - string: the string representation", "points. ''' class PointsList(): ''' The constructor of the class. Params: - list_of_points:", "evaporates the pheromones in the points. ''' def evaporate_pheromone(self, p): for point in", "the point. Return: - point: The coordinates of the point. ''' def get_point(self):", "\" return \"memory: \" + memory + \" and \" + \"current location\"", "self.pheromone = pheromone ''' Method to set a coordinate of the point. Params:", "the list of points. Return: - list: the the list of points. '''", "for _ in range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2) for _ in range(0,int(n/2))] return (theta)", "return True ''' Method that returns the string representation of the bat. Return:", "float: the cost of the best ant. ''' def get_best_ant(self, function): best_ant =", "the pheromone of the point. Return: - point: The pheromone of the point.", "Params: - point: The coordinate of the point. ''' def set_point(self, point): self.point", "the ant colony optimization with respect of the given function. ''' class ACO():", "= memory_limit self.current_localization = list() ''' Method to clear the ant location. '''", "for _ in range(0, number_ants)] for _ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that", "it's cost with respect to the cost function. Return: - Ant: the best", "- pheromone: the pheromone that leads to the point. 
''' def __init__(self, point,", "new_location): for i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method that adds a point to", "pheromones of the list of points. Return: - float: the total pf pheromones.", "float: the total pf pheromones. ''' def get_total_pheromones(self): total = 0 for point", "point. ''' def __init__(self, point, pheromone) -> None: self.point = point self.pheromone =", "a probabilistic desition. ''' def probabilistic_construction(self): for ant in self.ants: ant.clear_location() if(random() >", "if(random() > 1 - self.q): for point_list in self.points: ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point())", "- num_params: the number of dimentios of the objective function. - discrete_points: the", "over the objective function. Params: - fx: the cost function. Return: -list: a", "num_iterations (optional): The number of iterations of the algorithm. ''' def __init__(self, num_params,", "''' def get_pheromone(self): return self.pheromone ''' Method to set the pheromone of the", "p): for point in self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO. Class to run the", "def run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant, best_cost = self.get_best_ant(fx) best_location = best_ant.get_location() self.update_pheromone(best_ant, best_cost)", "points. Return: - float: the total pf pheromones. ''' def get_total_pheromones(self): total =", "''' The constructor of the class. Params: - point: a coordinate. - pheromone:", "cost): ant.update_pheromone(cost) for point_list in self.points: point_list.evaporate_pheromone(self.p) ''' Method in which the ants", "Method in which the ants in the colony decides to move to a", "-> None: self.point = point self.pheromone = pheromone ''' Method to get the", "Method to get the pheromone of the point. 
Return: - point: The pheromone", "Return: - string: the string representation of the bat. ''' def __str__(self): memory", "+ \"current location\" + location ''' Class PointList. A list that contains points.", "The constructor of the class. Params: - list_of_points: the list of points. '''", "the objective function. - discrete_points: the number of discrete points to sample. -", "function. Return: -list: a list with the best point find by the colony.", "-list: a list with the best point find by the colony. -float: the", "best ant and it's cost with respect to the cost function. Return: -", "which the ants in the colony decides to move to a location based", "memory. Params: - point: the point that will be saved in the ant", "self.current_localization.append(point) ''' Method that updates the pheromone of the current location point of", "ant.assign_point(point) break ''' Method to run the PSO heuristic over the objective function.", "Params: - new_location: a list that contains the coordinates of the new location.", "The coordinates of the point. ''' def get_point(self): return self.point ''' Method to", "that returns the sum of the pheromones of the list of points. Return:", "num_iterations self.discrete_points = discrete_points self.points = list() self.q = q self.p = evaporation_rate", "list of coordinates of the ant position. ''' def get_location(self): output_list = list()", "a position and a pheromone that leads to the point. ''' class Point():", "the ant location. ''' def clear_location(self): self.current_localization = list() ''' Method to get", "list() ''' Method to get the coordinates of the ant location. Return: -", "induced by the best solution in the colony. ''' def update_pheromone(self, error): for", "+ str(point) + \" \" location = \"\" for point in self.current_localization: location", "The pheromone of the point. ''' def set_pheromone(self, pheromone): self.pheromone = pheromone '''", "Params: - list_of_points: the list of points. 
''' def __init__(self, list_of_points) -> None:", "the points. ''' def evaporate_pheromone(self, p): for point in self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class", "algorithm. ''' def __init__(self, num_params, discrete_points, interval, number_ants, q, evaporation_rate, num_iterations = 50)", "ant in self.ants: res = minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) ''' Method that", "minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) ''' Method that updates the pheromone of the", "location based on the pheromone trail or on a probabilistic desition. ''' def", "0 for point in self.points: total += point.get_pheromone() return total ''' Method that", "in self.points: ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for point_list in self.points: for point", "+= point.get_pheromone() return total ''' Method that returns the list of points. Return:", "Method to set a coordinate of the point. Params: - point: The coordinate", "for point_list in self.points: point_list.evaporate_pheromone(self.p) ''' Method in which the ants in the", "Method to update the position of the ant. Params: - new_location: a list", "of previous visited placed that an ant can remember. ''' def __init__(self, memory_limit)", "colony. - float: the cost of the best ant. ''' def get_best_ant(self, function):", "= point ''' Method that returns the string representation of the point. Return:", "The coordinate of the point. ''' def set_point(self, point): self.point = point '''", "for point in self.memory: memory += \" \" + str(point) + \" \"", "point in self.current_localization: location += \" \" + str(point) + \" \" return", "Point. A point is an object that has a position and a pheromone", "forget/remember previous visited places and return it's location. 
''' class Ant(): ''' The", "returns the best ant and it's cost with respect to the cost function.", "''' def get_best_ant(self, function): best_ant = self.ants[0] cost = function(best_ant.get_location()) for ant in", "get the coordinates of the point. Return: - point: The coordinates of the", "self.point = point ''' Method that returns the string representation of the point.", "list that contains points. ''' class PointsList(): ''' The constructor of the class.", "class. Params: - list_of_points: the list of points. ''' def __init__(self, list_of_points) ->", "= ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for point_list in self.points: for point in point_list.get_list_points(): if(random()", "set_point(self, point): self.point = point ''' Method that returns the string representation of", "in range(0,int(n/2))] return (theta) self.number_params = num_params self.num_iterations = num_iterations self.discrete_points = discrete_points", "location. Return: - list: the list of coordinates of the ant position. '''", "the class. Params: - memory_limit: the maximum number of previous visited placed that", "in range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2) for _ in range(0,int(n/2))] return (theta) self.number_params =", "pheromone that leads to the point. ''' def __init__(self, point, pheromone) -> None:", "the list of coordinates of the ant position. ''' def get_location(self): output_list =", "a list that contains the coordinates of the new location. ''' def update_location(self,", "point. ''' def get_pheromone(self): return self.pheromone ''' Method to set the pheromone of", "self.memory[0] return True ''' Method that returns the string representation of the bat.", "that contains points. ''' class PointsList(): ''' The constructor of the class. Params:", "''' class PointsList(): ''' The constructor of the class. 
Params: - list_of_points: the", "q, evaporation_rate, num_iterations = 50) -> None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for _", "location point of the ant. Params: - error: The error induced by the", "pheromones in the points. ''' def evaporate_pheromone(self, p): for point in self.points: point.set_pheromone((1-p)*point.get_pheromone())", "[Point(uniform(interval[0],interval[1]), 1/2) for _ in range(discrete_points)] theta = [Point(uniform(0, pi),1/2) for _ in", "constructor of the class. Params: - memory_limit: the maximum number of previous visited", "self.pheromone = pheromone ''' Method to get the coordinates of the point. Return:", "for point in self.current_localization: output_list.append(point.get_point()) return output_list ''' Method to update the position", "for i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method that adds a point to the", "ACO(): ''' The constructor of the class. Params: - num_params: the number of", "the colony. - q: A constant. - evaporation_rate: A constant to control the", "to set the pheromone of the point. Params: - pheromone: The pheromone of", "for _ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that returns the best ant and", "50) -> None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for _ in range(discrete_points)] theta =", "of the ant location. Return: - list: the list of coordinates of the", "of the colony. - q: A constant. - evaporation_rate: A constant to control", "class PointsList(): ''' The constructor of the class. Params: - list_of_points: the list", "of the best ant. ''' def get_best_ant(self, function): best_ant = self.ants[0] cost =", "that does a local search around the current position of an ant. 
'''", "an object that has a position, a memory and a limit for it's", "self.points = list() self.q = q self.p = evaporation_rate self.ants = [Ant(num_params) for", "= pheromone ''' Method to get the coordinates of the point. Return: -", "Method to get the coordinates of the ant location. Return: - list: the", "= minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) ''' Method that updates the pheromone of", "import e, sqrt,cos,pi ''' Class Point. A point is an object that has", "a position, a memory and a limit for it's memory. An ant can", "contains points. ''' class PointsList(): ''' The constructor of the class. Params: -", "best_ant, cost ''' Method that does a local search around the current position", "can remember. ''' def __init__(self, memory_limit) -> None: self.memory = list() self.memory_limit =", "ant memory Return: - True: if the point was added to the memory", "None: self.points = list_of_points ''' Method that returns the point object that has", "num_params: the number of dimentios of the objective function. - discrete_points: the number", "Return: -list: a list with the best point find by the colony. -float:", "def get_best_ant(self, function): best_ant = self.ants[0] cost = function(best_ant.get_location()) for ant in self.ants:", "error induced by the best solution in the colony. ''' def update_pheromone(self, error):", "<reponame>mentesniker/Maxcut-solver from random import random, uniform from scipy.optimize import minimize from math import", "leads to the point. ''' def __init__(self, point, pheromone) -> None: self.point =", "the coordinates of the point. Return: - point: The coordinates of the point.", "Method to set the pheromone of the point. Params: - pheromone: The pheromone", "the given function. ''' class ACO(): ''' The constructor of the class. Params:", "a list with the best point find by the colony. 
-float: the cost", "point, pheromone) -> None: self.point = point self.pheromone = pheromone ''' Method to", "self.update_pheromone(ant, cost) if(cost < best_cost): best_location = ant.get_location() best_ant = ant best_cost =", "''' Method to clear the ant location. ''' def clear_location(self): self.current_localization = list()", "of the class. Params: - list_of_points: the list of points. ''' def __init__(self,", "point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO. Class to run the ant colony optimization with respect", "a limit for it's memory. An ant can move, forget/remember previous visited places", "= num_iterations self.discrete_points = discrete_points self.points = list() self.q = q self.p =", "the colony. - float: the cost of the best ant. ''' def get_best_ant(self,", "the list of points. ''' def __init__(self, list_of_points) -> None: self.points = list_of_points", "- Point: the point with the higher pheromone trail. ''' def get_best_point(self): best_point", "Method that returns the best ant and it's cost with respect to the", "that has a position and a pheromone that leads to the point. '''", "\" \" + str(point) + \" \" return \"memory: \" + memory +", "search around the current position of an ant. ''' def local_search(self, function): for", "point.get_pheromone() return total ''' Method that returns the list of points. Return: -", "point_list in self.points: point_list.evaporate_pheromone(self.p) ''' Method in which the ants in the colony", "\"current location\" + location ''' Class PointList. A list that contains points. '''", "+ \" and \" + \"current location\" + location ''' Class PointList. A", "str(point) + \" \" location = \"\" for point in self.current_localization: location +=", "\" + str(self.pheromone) ''' Class Ant. 
An ant is an object that has", "memory + \" and \" + \"current location\" + location ''' Class PointList.", "list_of_points) -> None: self.points = list_of_points ''' Method that returns the point object", "ants of the colony. - q: A constant. - evaporation_rate: A constant to", "def evaporate_pheromone(self, p): for point in self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO. Class to", "''' def assign_point(self, point): self.current_localization.append(point) ''' Method that updates the pheromone of the", "cost = ant_cost best_ant = ant return best_ant, cost ''' Method that does", "on a probabilistic desition. ''' def probabilistic_construction(self): for ant in self.ants: ant.clear_location() if(random()", "ant location. ''' def clear_location(self): self.current_localization = list() ''' Method to get the", "desition. ''' def probabilistic_construction(self): for ant in self.ants: ant.clear_location() if(random() > 1 -", "pheromones. ''' def get_total_pheromones(self): total = 0 for point in self.points: total +=", "''' Method that returns the list of points. Return: - list: the the", "> self.memory_limit ): del self.memory[0] return True ''' Method that returns the string", "[Point(uniform(0, 2*pi),1/2) for _ in range(0,int(n/2))] return (theta) self.number_params = num_params self.num_iterations =", "position. ''' def get_location(self): output_list = list() for point in self.current_localization: output_list.append(point.get_point()) return", "if(cost < best_cost): best_location = ant.get_location() best_ant = ant best_cost = cost return", "cost of the best ant. ''' def get_best_ant(self, function): best_ant = self.ants[0] cost", "self.memory = list() self.memory_limit = memory_limit self.current_localization = list() ''' Method to clear", "point. Return: - point: The coordinates of the point. ''' def get_point(self): return", "The pheromone of the point. 
''' def get_pheromone(self): return self.pheromone ''' Method to", "in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method that adds a point to the list that", "of the list of points. Return: - float: the total pf pheromones. '''", "first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for _ in range(discrete_points)] theta = [Point(uniform(0, pi),1/2) for _", "q self.p = evaporation_rate self.ants = [Ant(num_params) for _ in range(0, number_ants)] for", "= list() self.memory_limit = memory_limit self.current_localization = list() ''' Method to clear the", "< cost): cost = ant_cost best_ant = ant return best_ant, cost ''' Method", "move, forget/remember previous visited places and return it's location. ''' class Ant(): '''", "the ant. Params: - new_location: a list that contains the coordinates of the", "list that contains the coordinates of the new location. ''' def update_location(self, new_location):", "''' The constructor of the class. Params: - num_params: the number of dimentios", "point. Return: - string: the string representation of the point. ''' def __str__(self):", "best_location = best_ant.get_location() self.update_pheromone(best_ant, best_cost) for i in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant, cost", "added to the memory and False otherwise. ''' def set_memory(self, point): for p", "the pheromone that leads to the point. ''' def __init__(self, point, pheromone) ->", "for point in self.points: total += point.get_pheromone() return total ''' Method that returns", "a pheromone that leads to the point. ''' class Point(): ''' The constructor", "with respect to the cost function. Return: - Ant: the best ant in", "+ \" \" return \"memory: \" + memory + \" and \" +", "''' def __init__(self, point, pheromone) -> None: self.point = point self.pheromone = pheromone", "ant. 
Params: - error: The error induced by the best solution in the", "= \"\" for point in self.memory: memory += \" \" + str(point) +", "self.current_localization = list() ''' Method to clear the ant location. ''' def clear_location(self):", "best_point = Point(0,0) for point in self.points: if(point.get_pheromone() > best_point.get_pheromone()): best_point = point", "respect of the given function. ''' class ACO(): ''' The constructor of the", "''' def set_pheromone(self, pheromone): self.pheromone = pheromone ''' Method to set a coordinate", "the pheromone. - num_iterations (optional): The number of iterations of the algorithm. '''", "of coordinates of the ant position. ''' def get_location(self): output_list = list() for", "- number_ants: The number of ants of the colony. - q: A constant.", "ant. ''' def assign_point(self, point): self.current_localization.append(point) ''' Method that updates the pheromone of", "PointList. A list that contains points. ''' class PointsList(): ''' The constructor of", "object that has the higher pheromone. Return: - Point: the point with the", "the new location. ''' def update_location(self, new_location): for i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) '''", "constant to control the evaporation of the pheromone. - num_iterations (optional): The number", "of the point. ''' def set_pheromone(self, pheromone): self.pheromone = pheromone ''' Method to", "None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for _ in range(discrete_points)] theta = [Point(uniform(0, pi),1/2)", "- point: a coordinate. - pheromone: the pheromone that leads to the point.", "self.points ''' Method that evaporates the pheromones in the points. ''' def evaporate_pheromone(self,", "colony. ''' def update_pheromone(self, ant, cost): ant.update_pheromone(cost) for point_list in self.points: point_list.evaporate_pheromone(self.p) '''", "of the point. 
''' def set_point(self, point): self.point = point ''' Method that", "Method to save a new location in the ant memory. Params: - point:", "''' def __init__(self, num_params, discrete_points, interval, number_ants, q, evaporation_rate, num_iterations = 50) ->", "class Point(): ''' The constructor of the class. Params: - point: a coordinate.", "of ants of the colony. - q: A constant. - evaporation_rate: A constant", "''' Method that evaporates the pheromones in the points. ''' def evaporate_pheromone(self, p):", "range(0,int(n/2))] return (theta) self.number_params = num_params self.num_iterations = num_iterations self.discrete_points = discrete_points self.points", "''' Class Ant. An ant is an object that has a position, a", "the pheromone of the ants in the colony. ''' def update_pheromone(self, ant, cost):", "to get the coordinates of the ant location. Return: - list: the list", "- discrete_points: the number of discrete points to sample. - interval: an interval", "pheromone of the ants in the colony. ''' def update_pheromone(self, ant, cost): ant.update_pheromone(cost)", "interval, number_ants, q, evaporation_rate, num_iterations = 50) -> None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2)", "self.ants = [Ant(num_params) for _ in range(0, number_ants)] for _ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points)))", "evaporation_rate: A constant to control the evaporation of the pheromone. - num_iterations (optional):", "higher pheromone trail. ''' def get_best_point(self): best_point = Point(0,0) for point in self.points:", "point: The coordinates of the point. ''' def get_point(self): return self.point ''' Method", "self.memory_limit ): del self.memory[0] return True ''' Method that returns the string representation", "the pheromones of the list of points. 
Return: - float: the total pf", "cost) if(cost < best_cost): best_location = ant.get_location() best_ant = ant best_cost = cost", "point: The pheromone of the point. ''' def get_pheromone(self): return self.pheromone ''' Method", "def clear_location(self): self.current_localization = list() ''' Method to get the coordinates of the", "that evaporates the pheromones in the points. ''' def evaporate_pheromone(self, p): for point", "function): for ant in self.ants: res = minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) '''", "list_of_points ''' Method that returns the point object that has the higher pheromone.", "to run the ant colony optimization with respect of the given function. '''", "discrete_points, interval, number_ants, q, evaporation_rate, num_iterations = 50) -> None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]),", "def get_location(self): output_list = list() for point in self.current_localization: output_list.append(point.get_point()) return output_list '''", "to a location based on the pheromone trail or on a probabilistic desition.", "probabilistic_construction(self): for ant in self.ants: ant.clear_location() if(random() > 1 - self.q): for point_list", "= list() for point in self.current_localization: output_list.append(point.get_point()) return output_list ''' Method to update", "of the point. ''' def get_point(self): return self.point ''' Method to get the", "in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant, cost = self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost < best_cost):", "point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point) if (ant_asigned): ant.assign_point(point) break ''' Method", "best_point ''' Method that returns the sum of the pheromones of the list", "the best ant in the colony. 
- float: the cost of the best", "the the list of points. ''' def get_list_points(self): return self.points ''' Method that", "best_point.get_pheromone()): best_point = point return best_point ''' Method that returns the sum of", "on the pheromone trail or on a probabilistic desition. ''' def probabilistic_construction(self): for", "of the class. Params: - num_params: the number of dimentios of the objective", "probabilistic desition. ''' def probabilistic_construction(self): for ant in self.ants: ant.clear_location() if(random() > 1", "from random import random, uniform from scipy.optimize import minimize from math import e,", "if(point.get_pheromone() > best_point.get_pheromone()): best_point = point return best_point ''' Method that returns the", "(optional): The number of iterations of the algorithm. ''' def __init__(self, num_params, discrete_points,", "can move, forget/remember previous visited places and return it's location. ''' class Ant():", "point_list in self.points: ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for point_list in self.points: for", "An ant can move, forget/remember previous visited places and return it's location. '''", "of the pheromone. - num_iterations (optional): The number of iterations of the algorithm.", "points. ''' def __init__(self, list_of_points) -> None: self.points = list_of_points ''' Method that", "an interval to draw number from. - number_ants: The number of ants of", "cost function. Return: -list: a list with the best point find by the", "representation of the point. Return: - string: the string representation of the point.", "''' Method to set the pheromone of the point. Params: - pheromone: The", "location += \" \" + str(point) + \" \" return \"memory: \" +", "PointsList(): ''' The constructor of the class. Params: - list_of_points: the list of", "break ''' Method to run the PSO heuristic over the objective function. 
Params:", "that returns the string representation of the bat. Return: - string: the string", "a new location in the ant memory. Params: - point: the point that", "sqrt,cos,pi ''' Class Point. A point is an object that has a position", "colony. ''' def update_pheromone(self, error): for point in self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error)) '''", "object that has a position, a memory and a limit for it's memory.", "decides to move to a location based on the pheromone trail or on", "is an object that has a position and a pheromone that leads to", "the point. Return: - point: The pheromone of the point. ''' def get_pheromone(self):", "i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method that adds a point to the list", "return (theta) self.number_params = num_params self.num_iterations = num_iterations self.discrete_points = discrete_points self.points =", "- point: The coordinates of the point. ''' def get_point(self): return self.point '''", "self.memory: if(point.get_point() == p.get_point()): return False self.memory.append(point) if( len(self.memory) > self.memory_limit ): del", "set the pheromone of the point. Params: - pheromone: The pheromone of the", "in the ant memory. Params: - point: the point that will be saved", "''' def run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant, best_cost = self.get_best_ant(fx) best_location = best_ant.get_location() self.update_pheromone(best_ant,", "point ''' Method that returns the string representation of the point. Return: -", "get the coordinates of the ant location. Return: - list: the list of", "the ant. Params: - error: The error induced by the best solution in", "= self.ants[0] cost = function(best_ant.get_location()) for ant in self.ants: ant_cost = (function(ant.get_location())) if(ant_cost", "(1/error)) ''' Method to save a new location in the ant memory. 
Params:", "= (function(ant.get_location())) if(ant_cost < cost): cost = ant_cost best_ant = ant return best_ant,", "get_location(self): output_list = list() for point in self.current_localization: output_list.append(point.get_point()) return output_list ''' Method", "for ant in self.ants: res = minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) ''' Method", "- pheromone: The pheromone of the point. ''' def set_pheromone(self, pheromone): self.pheromone =", "\"memory: \" + memory + \" and \" + \"current location\" + location", "that contains the coordinates of the new location. ''' def update_location(self, new_location): for", "for i in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant, cost = self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost", "self.local_search(fx) ant, cost = self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost < best_cost): best_location = ant.get_location()", "self.probabilistic_construction() self.local_search(fx) ant, cost = self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost < best_cost): best_location =", "pheromone of the point. Return: - point: The pheromone of the point. '''", "return \"point: \" + str(self.point) + \",\" + \"pheromone: \" + str(self.pheromone) '''", "the pheromone of the point. Params: - pheromone: The pheromone of the point.", "that returns the string representation of the point. Return: - string: the string", "''' Method that updates the pheromone of the current location point of the", "error): for point in self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error)) ''' Method to save a", "Method that returns the string representation of the point. 
Return: - string: the", "for _ in range(0,int(n/2))] return (theta) self.number_params = num_params self.num_iterations = num_iterations self.discrete_points", "for point_list in self.points: for point in point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned =", "self.update_pheromone(best_ant, best_cost) for i in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant, cost = self.get_best_ant(fx) self.update_pheromone(ant,", "''' def get_location(self): output_list = list() for point in self.current_localization: output_list.append(point.get_point()) return output_list", "''' Method to update the position of the ant. Params: - new_location: a", "the class. Params: - list_of_points: the list of points. ''' def __init__(self, list_of_points)", "+ (1/error)) ''' Method to save a new location in the ant memory.", "the best ant and it's cost with respect to the cost function. Return:", "get_best_ant(self, function): best_ant = self.ants[0] cost = function(best_ant.get_location()) for ant in self.ants: ant_cost", "point self.pheromone = pheromone ''' Method to get the coordinates of the point.", "''' class Ant(): ''' The constructor of the class. Params: - memory_limit: the", "Ant(): ''' The constructor of the class. Params: - memory_limit: the maximum number", "+ location ''' Class PointList. A list that contains points. ''' class PointsList():", "to control the evaporation of the pheromone. - num_iterations (optional): The number of", "points. ''' def evaporate_pheromone(self, p): for point in self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO.", "coordinate of the point. Params: - point: The coordinate of the point. '''", "coordinate of the point. ''' def set_point(self, point): self.point = point ''' Method", "theta = [Point(uniform(0, pi),1/2) for _ in range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2) for _", "fx: the cost function. 
Return: -list: a list with the best point find", "''' The constructor of the class. Params: - list_of_points: the list of points.", "A point is an object that has a position and a pheromone that", "memory_limit: the maximum number of previous visited placed that an ant can remember.", "point_list in self.points: for point in point_list.get_list_points(): if(random() > (point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point)", "''' Method that returns the point object that has the higher pheromone. Return:", "def get_pheromone(self): return self.pheromone ''' Method to set the pheromone of the point.", "optimization with respect of the given function. ''' class ACO(): ''' The constructor", "the pheromone trail or on a probabilistic desition. ''' def probabilistic_construction(self): for ant", "function. - discrete_points: the number of discrete points to sample. - interval: an", "get_point(self): return self.point ''' Method to get the pheromone of the point. Return:", "the objective function. Params: - fx: the cost function. Return: -list: a list", "if (ant_asigned): ant.assign_point(point) break ''' Method to run the PSO heuristic over the", "and False otherwise. ''' def set_memory(self, point): for p in self.memory: if(point.get_point() ==", "True: if the point was added to the memory and False otherwise. '''", "\" + str(self.point) + \",\" + \"pheromone: \" + str(self.pheromone) ''' Class Ant.", "pheromone. Return: - Point: the point with the higher pheromone trail. ''' def", "''' Method to set a coordinate of the point. 
Params: - point: The", "self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error)) ''' Method to save a new location in the", "ant_cost = (function(ant.get_location())) if(ant_cost < cost): cost = ant_cost best_ant = ant return", "self.ants: ant.clear_location() if(random() > 1 - self.q): for point_list in self.points: ant_asigned =", "''' def update_pheromone(self, ant, cost): ant.update_pheromone(cost) for point_list in self.points: point_list.evaporate_pheromone(self.p) ''' Method", "point: a coordinate. - pheromone: the pheromone that leads to the point. '''", "in self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error)) ''' Method to save a new location in", "in self.ants: ant.clear_location() if(random() > 1 - self.q): for point_list in self.points: ant_asigned", "draw number from. - number_ants: The number of ants of the colony. -", "the ant location. Return: - list: the list of coordinates of the ant", "+ \",\" + \"pheromone: \" + str(self.pheromone) ''' Class Ant. An ant is", "Method to run the PSO heuristic over the objective function. Params: - fx:", "for point in self.current_localization: location += \" \" + str(point) + \" \"", "list() self.memory_limit = memory_limit self.current_localization = list() ''' Method to clear the ant", "point in self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO. Class to run the ant colony", "to the cost function. Return: - Ant: the best ant in the colony.", "does a local search around the current position of an ant. ''' def", "the point. ''' def set_point(self, point): self.point = point ''' Method that returns", "best_ant.get_location() self.update_pheromone(best_ant, best_cost) for i in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant, cost = self.get_best_ant(fx)", "the sum of the pheromones of the list of points. Return: - float:", "- error: The error induced by the best solution in the colony. 
'''", "\" \" + str(point) + \" \" location = \"\" for point in", "string representation of the bat. Return: - string: the string representation of the", "in the points. ''' def evaporate_pheromone(self, p): for point in self.points: point.set_pheromone((1-p)*point.get_pheromone()) '''", "will be saved in the ant memory Return: - True: if the point", "cost = self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost < best_cost): best_location = ant.get_location() best_ant =", "limit for it's memory. An ant can move, forget/remember previous visited places and", "point with the higher pheromone trail. ''' def get_best_point(self): best_point = Point(0,0) for", "''' Method to run the PSO heuristic over the objective function. Params: -", "list of points. Return: - list: the the list of points. ''' def", "> best_point.get_pheromone()): best_point = point return best_point ''' Method that returns the sum", "number of dimentios of the objective function. - discrete_points: the number of discrete", "_ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that returns the best ant and it's", "the ants in the colony decides to move to a location based on", "point. ''' class Point(): ''' The constructor of the class. Params: - point:", "point. ''' def __str__(self): return \"point: \" + str(self.point) + \",\" + \"pheromone:", "get_pheromone(self): return self.pheromone ''' Method to set the pheromone of the point. Params:", "of the algorithm. ''' def __init__(self, num_params, discrete_points, interval, number_ants, q, evaporation_rate, num_iterations", "best_ant = self.ants[0] cost = function(best_ant.get_location()) for ant in self.ants: ant_cost = (function(ant.get_location()))", "point. ''' def set_point(self, point): self.point = point ''' Method that returns the", "of the bat. ''' def __str__(self): memory = \"\" for point in self.memory:", "point to the list that contains the location of the ant. 
''' def", "ant. Params: - new_location: a list that contains the coordinates of the new", "self.pheromone ''' Method to set the pheromone of the point. Params: - pheromone:", "self.memory.append(point) if( len(self.memory) > self.memory_limit ): del self.memory[0] return True ''' Method that", "+ str(self.point) + \",\" + \"pheromone: \" + str(self.pheromone) ''' Class Ant. An", "del self.memory[0] return True ''' Method that returns the string representation of the", "''' class Point(): ''' The constructor of the class. Params: - point: a", "class ACO(): ''' The constructor of the class. Params: - num_params: the number", "if the point was added to the memory and False otherwise. ''' def", "function. Return: - Ant: the best ant in the colony. - float: the", "ant can remember. ''' def __init__(self, memory_limit) -> None: self.memory = list() self.memory_limit", "Params: - point: the point that will be saved in the ant memory", "- num_iterations (optional): The number of iterations of the algorithm. ''' def __init__(self,", "has a position and a pheromone that leads to the point. ''' class", "def get_total_pheromones(self): total = 0 for point in self.points: total += point.get_pheromone() return", "point is an object that has a position and a pheromone that leads", "with the higher pheromone trail. ''' def get_best_point(self): best_point = Point(0,0) for point", "best point found by the colony. ''' def run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant, best_cost", "total = 0 for point in self.points: total += point.get_pheromone() return total '''", "+ str(point) + \" \" return \"memory: \" + memory + \" and", "def update_location(self, new_location): for i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method that adds a", "\",\" + \"pheromone: \" + str(self.pheromone) ''' Class Ant. An ant is an", "Return: - list: the list of coordinates of the ant position. 
''' def", "position, a memory and a limit for it's memory. An ant can move,", "the current location point of the ant. Params: - error: The error induced", "from scipy.optimize import minimize from math import e, sqrt,cos,pi ''' Class Point. A", "len(self.memory) > self.memory_limit ): del self.memory[0] return True ''' Method that returns the", "string: the string representation of the bat. ''' def __str__(self): memory = \"\"", "trail. ''' def get_best_point(self): best_point = Point(0,0) for point in self.points: if(point.get_pheromone() >", "ant in self.ants: ant_cost = (function(ant.get_location())) if(ant_cost < cost): cost = ant_cost best_ant", "list() ''' Method to clear the ant location. ''' def clear_location(self): self.current_localization =", "evaporation_rate, num_iterations = 50) -> None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for _ in", "point was added to the memory and False otherwise. ''' def set_memory(self, point):", "string representation of the point. ''' def __str__(self): return \"point: \" + str(self.point)", "function. Params: - fx: the cost function. Return: -list: a list with the", "pheromone trail or on a probabilistic desition. ''' def probabilistic_construction(self): for ant in", "+ memory + \" and \" + \"current location\" + location ''' Class", "def update_pheromone(self, ant, cost): ant.update_pheromone(cost) for point_list in self.points: point_list.evaporate_pheromone(self.p) ''' Method in", "of the point. Return: - point: The pheromone of the point. 
''' def", "ant is an object that has a position, a memory and a limit", "update_location(self, new_location): for i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method that adds a point", "self.points: total += point.get_pheromone() return total ''' Method that returns the list of", "''' def __init__(self, memory_limit) -> None: self.memory = list() self.memory_limit = memory_limit self.current_localization", "ant. ''' def local_search(self, function): for ant in self.ants: res = minimize(function, ant.get_location(),", "\"\" for point in self.current_localization: location += \" \" + str(point) + \"", "The constructor of the class. Params: - memory_limit: the maximum number of previous", "pheromone ''' Method to set a coordinate of the point. Params: - point:", "str(self.pheromone) ''' Class Ant. An ant is an object that has a position,", "Class PointList. A list that contains points. ''' class PointsList(): ''' The constructor", "total pf pheromones. ''' def get_total_pheromones(self): total = 0 for point in self.points:", "self.current_localization = list() ''' Method to get the coordinates of the ant location.", "and a pheromone that leads to the point. ''' class Point(): ''' The", "pf pheromones. ''' def get_total_pheromones(self): total = 0 for point in self.points: total", "the point. ''' def set_pheromone(self, pheromone): self.pheromone = pheromone ''' Method to set", "has the higher pheromone. Return: - Point: the point with the higher pheromone", "pheromone that leads to the point. ''' class Point(): ''' The constructor of", "= point self.pheromone = pheromone ''' Method to get the coordinates of the", "update_pheromone(self, error): for point in self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error)) ''' Method to save", "''' Method that does a local search around the current position of an", "the point. 
''' def __init__(self, point, pheromone) -> None: self.point = point self.pheromone", "self.discrete_points = discrete_points self.points = list() self.q = q self.p = evaporation_rate self.ants", "the cost function. Return: - Ant: the best ant in the colony. -", "Return: - Point: the point with the higher pheromone trail. ''' def get_best_point(self):", "it's memory. An ant can move, forget/remember previous visited places and return it's", "to move to a location based on the pheromone trail or on a", "+ \" \" location = \"\" for point in self.current_localization: location += \"", "''' def get_best_point(self): best_point = Point(0,0) for point in self.points: if(point.get_pheromone() > best_point.get_pheromone()):", "location. ''' def update_location(self, new_location): for i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method that", "ant memory. Params: - point: the point that will be saved in the", "the ants in the colony. ''' def update_pheromone(self, ant, cost): ant.update_pheromone(cost) for point_list", "the cost function. Return: -list: a list with the best point find by", "to the point. ''' def __init__(self, point, pheromone) -> None: self.point = point", "best_ant, best_cost = self.get_best_ant(fx) best_location = best_ant.get_location() self.update_pheromone(best_ant, best_cost) for i in range(self.num_iterations):", "def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for _ in range(discrete_points)] theta = [Point(uniform(0, pi),1/2) for", "points. ''' def get_list_points(self): return self.points ''' Method that evaporates the pheromones in", "Return: - list: the the list of points. ''' def get_list_points(self): return self.points", "list_of_points: the list of points. 
''' def __init__(self, list_of_points) -> None: self.points =", "in self.memory: if(point.get_point() == p.get_point()): return False self.memory.append(point) if( len(self.memory) > self.memory_limit ):", "of the current location point of the ant. Params: - error: The error", "colony. ''' def run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant, best_cost = self.get_best_ant(fx) best_location = best_ant.get_location()", "constructor of the class. Params: - point: a coordinate. - pheromone: the pheromone", "The constructor of the class. Params: - num_params: the number of dimentios of", "memory and False otherwise. ''' def set_memory(self, point): for p in self.memory: if(point.get_point()", "representation of the point. ''' def __str__(self): return \"point: \" + str(self.point) +", "number of discrete points to sample. - interval: an interval to draw number", "self.p = evaporation_rate self.ants = [Ant(num_params) for _ in range(0, number_ants)] for _", "Params: - pheromone: The pheromone of the point. ''' def set_pheromone(self, pheromone): self.pheromone", "function. ''' class ACO(): ''' The constructor of the class. Params: - num_params:", "in self.ants: ant_cost = (function(ant.get_location())) if(ant_cost < cost): cost = ant_cost best_ant =", "location of the ant. ''' def assign_point(self, point): self.current_localization.append(point) ''' Method that updates", "- list: the list of coordinates of the ant position. ''' def get_location(self):", "clear_location(self): self.current_localization = list() ''' Method to get the coordinates of the ant", "-> None: self.points = list_of_points ''' Method that returns the point object that", "iterations of the algorithm. ''' def __init__(self, num_params, discrete_points, interval, number_ants, q, evaporation_rate,", "ACO. Class to run the ant colony optimization with respect of the given", "the higher pheromone trail. 
''' def get_best_point(self): best_point = Point(0,0) for point in", "in which the ants in the colony decides to move to a location", "return total ''' Method that returns the list of points. Return: - list:", "that will be saved in the ant memory Return: - True: if the", "and a limit for it's memory. An ant can move, forget/remember previous visited", "Point: the point with the higher pheromone trail. ''' def get_best_point(self): best_point =", "coordinates of the point. ''' def get_point(self): return self.point ''' Method to get", "find by the colony. -float: the cost of the best point found by", "function(best_ant.get_location()) for ant in self.ants: ant_cost = (function(ant.get_location())) if(ant_cost < cost): cost =", "the list that contains the location of the ant. ''' def assign_point(self, point):", "- point: The coordinate of the point. ''' def set_point(self, point): self.point =", "for point in self.points: if(point.get_pheromone() > best_point.get_pheromone()): best_point = point return best_point '''", "by the best solution in the colony. ''' def update_pheromone(self, error): for point", "self.current_localization: location += \" \" + str(point) + \" \" return \"memory: \"", "def get_list_points(self): return self.points ''' Method that evaporates the pheromones in the points.", "for _ in range(discrete_points)] theta = [Point(uniform(0, pi),1/2) for _ in range(0,int(n/2))] +", "ant.set_memory(point) if (ant_asigned): ant.assign_point(point) break ''' Method to run the PSO heuristic over", "= list() ''' Method to get the coordinates of the ant location. Return:", "returns the sum of the pheromones of the list of points. Return: -", "of the best point found by the colony. ''' def run(self,fx): self.probabilistic_construction() self.local_search(fx)", "points. Return: - list: the the list of points. ''' def get_list_points(self): return", "to draw number from. 
- number_ants: The number of ants of the colony.", "self.point ''' Method to get the pheromone of the point. Return: - point:", "= discrete_points self.points = list() self.q = q self.p = evaporation_rate self.ants =", "of the ants in the colony. ''' def update_pheromone(self, ant, cost): ant.update_pheromone(cost) for", "= Point(0,0) for point in self.points: if(point.get_pheromone() > best_point.get_pheromone()): best_point = point return", "return best_point ''' Method that returns the sum of the pheromones of the", "= list() self.q = q self.p = evaporation_rate self.ants = [Ant(num_params) for _", "str(self.point) + \",\" + \"pheromone: \" + str(self.pheromone) ''' Class Ant. An ant", "the ant. ''' def assign_point(self, point): self.current_localization.append(point) ''' Method that updates the pheromone", "the cost of the best ant. ''' def get_best_ant(self, function): best_ant = self.ants[0]", "pheromone) -> None: self.point = point self.pheromone = pheromone ''' Method to get", "''' def __str__(self): return \"point: \" + str(self.point) + \",\" + \"pheromone: \"", "''' def probabilistic_construction(self): for ant in self.ants: ant.clear_location() if(random() > 1 - self.q):", "the best point found by the colony. ''' def run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant,", "function): best_ant = self.ants[0] cost = function(best_ant.get_location()) for ant in self.ants: ant_cost =", "= 0 for point in self.points: total += point.get_pheromone() return total ''' Method", "location\" + location ''' Class PointList. A list that contains points. ''' class", "point: The coordinate of the point. ''' def set_point(self, point): self.point = point", "- q: A constant. - evaporation_rate: A constant to control the evaporation of", "1 - self.q): for point_list in self.points: ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for", "of the point. 
Params: - point: The coordinate of the point. ''' def", "> 1 - self.q): for point_list in self.points: ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else:", "pheromone of the point. ''' def set_pheromone(self, pheromone): self.pheromone = pheromone ''' Method", "the best point find by the colony. -float: the cost of the best", "the point. Return: - string: the string representation of the point. ''' def", "dimentios of the objective function. - discrete_points: the number of discrete points to", "get_best_point(self): best_point = Point(0,0) for point in self.points: if(point.get_pheromone() > best_point.get_pheromone()): best_point =", "or on a probabilistic desition. ''' def probabilistic_construction(self): for ant in self.ants: ant.clear_location()", "ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for point_list in self.points: for point in point_list.get_list_points(): if(random() >", "- self.q): for point_list in self.points: ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for point_list", "\"point: \" + str(self.point) + \",\" + \"pheromone: \" + str(self.pheromone) ''' Class", "that has a position, a memory and a limit for it's memory. An", "point in self.memory: memory += \" \" + str(point) + \" \" location", "__str__(self): return \"point: \" + str(self.point) + \",\" + \"pheromone: \" + str(self.pheromone)", "for ant in self.ants: ant.clear_location() if(random() > 1 - self.q): for point_list in", "self.ants: ant_cost = (function(ant.get_location())) if(ant_cost < cost): cost = ant_cost best_ant = ant", "Return: - True: if the point was added to the memory and False", "ants in the colony. ''' def update_pheromone(self, ant, cost): ant.update_pheromone(cost) for point_list in", "the number of discrete points to sample. 
- interval: an interval to draw", "def get_best_point(self): best_point = Point(0,0) for point in self.points: if(point.get_pheromone() > best_point.get_pheromone()): best_point", "''' def __str__(self): memory = \"\" for point in self.memory: memory += \"", "the ant memory Return: - True: if the point was added to the", "adds a point to the list that contains the location of the ant.", "return best_ant, cost ''' Method that does a local search around the current", "output_list.append(point.get_point()) return output_list ''' Method to update the position of the ant. Params:", "Method that returns the list of points. Return: - list: the the list", "def probabilistic_construction(self): for ant in self.ants: ant.clear_location() if(random() > 1 - self.q): for", "(theta) self.number_params = num_params self.num_iterations = num_iterations self.discrete_points = discrete_points self.points = list()", "Params: - memory_limit: the maximum number of previous visited placed that an ant", "-> None: self.memory = list() self.memory_limit = memory_limit self.current_localization = list() ''' Method", "_ in range(0,int(n/2))] return (theta) self.number_params = num_params self.num_iterations = num_iterations self.discrete_points =", "self.ants[0] cost = function(best_ant.get_location()) for ant in self.ants: ant_cost = (function(ant.get_location())) if(ant_cost <", "self.memory_limit = memory_limit self.current_localization = list() ''' Method to clear the ant location.", "- string: the string representation of the bat. ''' def __str__(self): memory =", "in self.current_localization: location += \" \" + str(point) + \" \" return \"memory:", "of iterations of the algorithm. ''' def __init__(self, num_params, discrete_points, interval, number_ants, q,", "memory_limit) -> None: self.memory = list() self.memory_limit = memory_limit self.current_localization = list() '''", "to save a new location in the ant memory. Params: - point: the", "number from. 
- number_ants: The number of ants of the colony. - q:", "= point return best_point ''' Method that returns the sum of the pheromones", "colony decides to move to a location based on the pheromone trail or", "constructor of the class. Params: - num_params: the number of dimentios of the", "in self.current_localization: output_list.append(point.get_point()) return output_list ''' Method to update the position of the", "the number of dimentios of the objective function. - discrete_points: the number of", "range(discrete_points)] theta = [Point(uniform(0, pi),1/2) for _ in range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2) for", "The number of iterations of the algorithm. ''' def __init__(self, num_params, discrete_points, interval,", "bat. ''' def __str__(self): memory = \"\" for point in self.memory: memory +=", "by the colony. -float: the cost of the best point found by the", "pheromone: the pheromone that leads to the point. ''' def __init__(self, point, pheromone)", "memory = \"\" for point in self.memory: memory += \" \" + str(point)", "cost): cost = ant_cost best_ant = ant return best_ant, cost ''' Method that", "to sample. - interval: an interval to draw number from. - number_ants: The", "coordinate. - pheromone: the pheromone that leads to the point. ''' def __init__(self,", "The number of ants of the colony. - q: A constant. - evaporation_rate:", "previous visited placed that an ant can remember. ''' def __init__(self, memory_limit) ->", "list() self.q = q self.p = evaporation_rate self.ants = [Ant(num_params) for _ in", "= function(best_ant.get_location()) for ant in self.ants: ant_cost = (function(ant.get_location())) if(ant_cost < cost): cost", "the bat. Return: - string: the string representation of the bat. 
''' def", "= ant_cost best_ant = ant return best_ant, cost ''' Method that does a", "= evaporation_rate self.ants = [Ant(num_params) for _ in range(0, number_ants)] for _ in", "self.q): for point_list in self.points: ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for point_list in", "in the colony decides to move to a location based on the pheromone", "point): self.current_localization.append(point) ''' Method that updates the pheromone of the current location point", "ant can move, forget/remember previous visited places and return it's location. ''' class", "to get the pheromone of the point. Return: - point: The pheromone of", "A constant. - evaporation_rate: A constant to control the evaporation of the pheromone.", "An ant is an object that has a position, a memory and a", "p in self.memory: if(point.get_point() == p.get_point()): return False self.memory.append(point) if( len(self.memory) > self.memory_limit", "of the new location. ''' def update_location(self, new_location): for i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i])", "that has the higher pheromone. Return: - Point: the point with the higher", "class. Params: - memory_limit: the maximum number of previous visited placed that an", "in self.memory: memory += \" \" + str(point) + \" \" location =", "i in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant, cost = self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost <", "points to sample. - interval: an interval to draw number from. - number_ants:", "new_location: a list that contains the coordinates of the new location. ''' def", "range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that returns the best ant and it's cost with", "of the point. Params: - pheromone: The pheromone of the point. 
''' def", "- string: the string representation of the point. ''' def __str__(self): return \"point:", "local search around the current position of an ant. ''' def local_search(self, function):", "for p in self.memory: if(point.get_point() == p.get_point()): return False self.memory.append(point) if( len(self.memory) >", "self.current_localization[i].set_point(new_location[i]) ''' Method that adds a point to the list that contains the", "ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for point_list in self.points: for point in point_list.get_list_points():", "__init__(self, num_params, discrete_points, interval, number_ants, q, evaporation_rate, num_iterations = 50) -> None: def", "range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant, cost = self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost < best_cost): best_location", "< best_cost): best_location = ant.get_location() best_ant = ant best_cost = cost return [best_location,self.num_iterations]", "self.point = point self.pheromone = pheromone ''' Method to get the coordinates of", "point found by the colony. ''' def run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant, best_cost =", "ant location. Return: - list: the list of coordinates of the ant position.", "that an ant can remember. ''' def __init__(self, memory_limit) -> None: self.memory =", "self.points = list_of_points ''' Method that returns the point object that has the", "total ''' Method that returns the list of points. Return: - list: the", "of the given function. ''' class ACO(): ''' The constructor of the class.", "q: A constant. - evaporation_rate: A constant to control the evaporation of the", "cost function. Return: - Ant: the best ant in the colony. - float:", "the ant memory. Params: - point: the point that will be saved in", "previous visited places and return it's location. 
''' class Ant(): ''' The constructor", "ant_cost best_ant = ant return best_ant, cost ''' Method that does a local", "the best solution in the colony. ''' def update_pheromone(self, error): for point in", "-> None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for _ in range(discrete_points)] theta = [Point(uniform(0,", "to clear the ant location. ''' def clear_location(self): self.current_localization = list() ''' Method", "str(point) + \" \" return \"memory: \" + memory + \" and \"", "class Ant(): ''' The constructor of the class. Params: - memory_limit: the maximum", "the class. Params: - num_params: the number of dimentios of the objective function.", "the string representation of the point. Return: - string: the string representation of", "for it's memory. An ant can move, forget/remember previous visited places and return", "res = minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) ''' Method that updates the pheromone", "A list that contains points. ''' class PointsList(): ''' The constructor of the", "method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) ''' Method that updates the pheromone of the ants in", "location. ''' class Ant(): ''' The constructor of the class. Params: - memory_limit:", "the colony. ''' def update_pheromone(self, ant, cost): ant.update_pheromone(cost) for point_list in self.points: point_list.evaporate_pheromone(self.p)", "pheromone of the point. 
''' def get_pheromone(self): return self.pheromone ''' Method to set", "pi),1/2) for _ in range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2) for _ in range(0,int(n/2))] return", "range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2) for _ in range(0,int(n/2))] return (theta) self.number_params = num_params", "(point.get_pheromone())/point_list.get_total_pheromones()): ant_asigned = ant.set_memory(point) if (ant_asigned): ant.assign_point(point) break ''' Method to run the", "= best_ant.get_location() self.update_pheromone(best_ant, best_cost) for i in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant, cost =", "Params: - fx: the cost function. Return: -list: a list with the best", "point. Params: - point: The coordinate of the point. ''' def set_point(self, point):", "\" + str(point) + \" \" location = \"\" for point in self.current_localization:", "num_params, discrete_points, interval, number_ants, q, evaporation_rate, num_iterations = 50) -> None: def first_guess_linear(n):", "best_cost) for i in range(self.num_iterations): self.probabilistic_construction() self.local_search(fx) ant, cost = self.get_best_ant(fx) self.update_pheromone(ant, cost)", "''' def __init__(self, list_of_points) -> None: self.points = list_of_points ''' Method that returns", "current position of an ant. ''' def local_search(self, function): for ant in self.ants:", "Method to get the coordinates of the point. Return: - point: The coordinates", "discrete_points: the number of discrete points to sample. - interval: an interval to", "''' Method to save a new location in the ant memory. Params: -", "position of an ant. 
''' def local_search(self, function): for ant in self.ants: res", "= self.get_best_ant(fx) self.update_pheromone(ant, cost) if(cost < best_cost): best_location = ant.get_location() best_ant = ant", "\" \" return \"memory: \" + memory + \" and \" + \"current", "''' def evaporate_pheromone(self, p): for point in self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO. Class", "and it's cost with respect to the cost function. Return: - Ant: the", "(ant_asigned): ant.assign_point(point) break ''' Method to run the PSO heuristic over the objective", "return output_list ''' Method to update the position of the ant. Params: -", "in self.ants: res = minimize(function, ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) ''' Method that updates", "self.memory: memory += \" \" + str(point) + \" \" location = \"\"", "list of points. Return: - float: the total pf pheromones. ''' def get_total_pheromones(self):", "that updates the pheromone of the current location point of the ant. Params:", "for point in self.current_localization: point.set_pheromone(point.get_pheromone() + (1/error)) ''' Method to save a new", "best point find by the colony. -float: the cost of the best point", "def __str__(self): memory = \"\" for point in self.memory: memory += \" \"", "of the ant. Params: - new_location: a list that contains the coordinates of", "False otherwise. ''' def set_memory(self, point): for p in self.memory: if(point.get_point() == p.get_point()):", "point object that has the higher pheromone. Return: - Point: the point with", "ant.update_location(res.x) ''' Method that updates the pheromone of the ants in the colony.", "sum of the pheromones of the list of points. Return: - float: the", "= list() ''' Method to clear the ant location. 
''' def clear_location(self): self.current_localization", "that adds a point to the list that contains the location of the", "best_point = point return best_point ''' Method that returns the sum of the", "- evaporation_rate: A constant to control the evaporation of the pheromone. - num_iterations", "the colony. -float: the cost of the best point found by the colony.", "of the ant position. ''' def get_location(self): output_list = list() for point in", "bat. Return: - string: the string representation of the bat. ''' def __str__(self):", "returns the string representation of the bat. Return: - string: the string representation", "ant, cost): ant.update_pheromone(cost) for point_list in self.points: point_list.evaporate_pheromone(self.p) ''' Method in which the", "number_ants: The number of ants of the colony. - q: A constant. -", "''' Method in which the ants in the colony decides to move to", "Method that returns the string representation of the bat. Return: - string: the", "= ant return best_ant, cost ''' Method that does a local search around", "discrete points to sample. - interval: an interval to draw number from. -", "def set_point(self, point): self.point = point ''' Method that returns the string representation", "for ant in self.ants: ant_cost = (function(ant.get_location())) if(ant_cost < cost): cost = ant_cost", "ant in self.ants: ant.clear_location() if(random() > 1 - self.q): for point_list in self.points:", "output_list ''' Method to update the position of the ant. Params: - new_location:", "the string representation of the bat. ''' def __str__(self): memory = \"\" for", "the string representation of the point. ''' def __str__(self): return \"point: \" +", "control the evaporation of the pheromone. - num_iterations (optional): The number of iterations", "self.q = q self.p = evaporation_rate self.ants = [Ant(num_params) for _ in range(0,", "the ant position. 
''' def get_location(self): output_list = list() for point in self.current_localization:", "that contains the location of the ant. ''' def assign_point(self, point): self.current_localization.append(point) '''", "= [Point(uniform(0, pi),1/2) for _ in range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2) for _ in", "2*pi),1/2) for _ in range(0,int(n/2))] return (theta) self.number_params = num_params self.num_iterations = num_iterations", "interval: an interval to draw number from. - number_ants: The number of ants", "= q self.p = evaporation_rate self.ants = [Ant(num_params) for _ in range(0, number_ants)]", "def __init__(self, memory_limit) -> None: self.memory = list() self.memory_limit = memory_limit self.current_localization =", "''' Class PointList. A list that contains points. ''' class PointsList(): ''' The", "number_ants, q, evaporation_rate, num_iterations = 50) -> None: def first_guess_linear(n): [Point(uniform(interval[0],interval[1]), 1/2) for", "point_list.evaporate_pheromone(self.p) ''' Method in which the ants in the colony decides to move", "return self.points ''' Method that evaporates the pheromones in the points. ''' def", "__init__(self, memory_limit) -> None: self.memory = list() self.memory_limit = memory_limit self.current_localization = list()", "of the objective function. - discrete_points: the number of discrete points to sample.", "e, sqrt,cos,pi ''' Class Point. A point is an object that has a", "it's location. ''' class Ant(): ''' The constructor of the class. Params: -", "the best ant. ''' def get_best_ant(self, function): best_ant = self.ants[0] cost = function(best_ant.get_location())", "options={\"maxiter\":5}) ant.update_location(res.x) ''' Method that updates the pheromone of the ants in the", "set a coordinate of the point. Params: - point: The coordinate of the", "in range(discrete_points)] theta = [Point(uniform(0, pi),1/2) for _ in range(0,int(n/2))] + [Point(uniform(0, 2*pi),1/2)", "Return: - Ant: the best ant in the colony. 
- float: the cost", "to the list that contains the location of the ant. ''' def assign_point(self,", "pheromone. - num_iterations (optional): The number of iterations of the algorithm. ''' def", "evaporate_pheromone(self, p): for point in self.points: point.set_pheromone((1-p)*point.get_pheromone()) ''' Class ACO. Class to run", "point. Params: - pheromone: The pheromone of the point. ''' def set_pheromone(self, pheromone):", "the point with the higher pheromone trail. ''' def get_best_point(self): best_point = Point(0,0)", "__str__(self): memory = \"\" for point in self.memory: memory += \" \" +", "ant.get_location(), method='COBYLA', options={\"maxiter\":5}) ant.update_location(res.x) ''' Method that updates the pheromone of the ants", "number of previous visited placed that an ant can remember. ''' def __init__(self,", "location in the ant memory. Params: - point: the point that will be", "''' def set_memory(self, point): for p in self.memory: if(point.get_point() == p.get_point()): return False", "memory. An ant can move, forget/remember previous visited places and return it's location.", "= num_params self.num_iterations = num_iterations self.discrete_points = discrete_points self.points = list() self.q =", "remember. ''' def __init__(self, memory_limit) -> None: self.memory = list() self.memory_limit = memory_limit", "Method that adds a point to the list that contains the location of", "pheromone of the current location point of the ant. Params: - error: The", "interval to draw number from. - number_ants: The number of ants of the", "the bat. ''' def __str__(self): memory = \"\" for point in self.memory: memory", "cost with respect to the cost function. Return: - Ant: the best ant", "a point to the list that contains the location of the ant. '''", "the cost of the best point found by the colony. ''' def run(self,fx):", "and \" + \"current location\" + location ''' Class PointList. A list that", "math import e, sqrt,cos,pi ''' Class Point. 
A point is an object that", "return False self.memory.append(point) if( len(self.memory) > self.memory_limit ): del self.memory[0] return True '''", "new location. ''' def update_location(self, new_location): for i in range(len(self.current_localization)): self.current_localization[i].set_point(new_location[i]) ''' Method", "[Ant(num_params) for _ in range(0, number_ants)] for _ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method", "): del self.memory[0] return True ''' Method that returns the string representation of", "an object that has a position and a pheromone that leads to the", "the coordinates of the new location. ''' def update_location(self, new_location): for i in", "of an ant. ''' def local_search(self, function): for ant in self.ants: res =", "an ant. ''' def local_search(self, function): for ant in self.ants: res = minimize(function,", "\" and \" + \"current location\" + location ''' Class PointList. A list", "Method that updates the pheromone of the current location point of the ant.", "coordinates of the ant location. Return: - list: the list of coordinates of", "self.points: ant_asigned = ant.set_memory(point_list.get_best_point()) ant.assign_point(point_list.get_best_point()) else: for point_list in self.points: for point in", "ant_asigned = ant.set_memory(point) if (ant_asigned): ant.assign_point(point) break ''' Method to run the PSO", "updates the pheromone of the current location point of the ant. Params: -", "constructor of the class. Params: - list_of_points: the list of points. ''' def", "the PSO heuristic over the objective function. Params: - fx: the cost function.", "memory_limit self.current_localization = list() ''' Method to clear the ant location. ''' def", "objective function. Params: - fx: the cost function. Return: -list: a list with", "places and return it's location. ''' class Ant(): ''' The constructor of the", "list with the best point find by the colony. 
-float: the cost of", "Params: - num_params: the number of dimentios of the objective function. - discrete_points:", "a memory and a limit for it's memory. An ant can move, forget/remember", "memory += \" \" + str(point) + \" \" location = \"\" for", "memory and a limit for it's memory. An ant can move, forget/remember previous", "- float: the total pf pheromones. ''' def get_total_pheromones(self): total = 0 for", "p.get_point()): return False self.memory.append(point) if( len(self.memory) > self.memory_limit ): del self.memory[0] return True", "Params: - error: The error induced by the best solution in the colony.", "''' Method that returns the string representation of the point. Return: - string:", "the position of the ant. Params: - new_location: a list that contains the", "the point object that has the higher pheromone. Return: - Point: the point", "colony optimization with respect of the given function. ''' class ACO(): ''' The", "None: self.memory = list() self.memory_limit = memory_limit self.current_localization = list() ''' Method to", "if(ant_cost < cost): cost = ant_cost best_ant = ant return best_ant, cost '''", "get the pheromone of the point. Return: - point: The pheromone of the", "objective function. - discrete_points: the number of discrete points to sample. - interval:", "+ str(self.pheromone) ''' Class Ant. An ant is an object that has a", "representation of the bat. ''' def __str__(self): memory = \"\" for point in", "has a position, a memory and a limit for it's memory. An ant", "pheromone trail. ''' def get_best_point(self): best_point = Point(0,0) for point in self.points: if(point.get_pheromone()", "by the colony. ''' def run(self,fx): self.probabilistic_construction() self.local_search(fx) best_ant, best_cost = self.get_best_ant(fx) best_location", "minimize from math import e, sqrt,cos,pi ''' Class Point. A point is an", "to get the coordinates of the point. 
Return: - point: The coordinates of", "1/2) for _ in range(discrete_points)] theta = [Point(uniform(0, pi),1/2) for _ in range(0,int(n/2))]", "total += point.get_pheromone() return total ''' Method that returns the list of points.", "ant return best_ant, cost ''' Method that does a local search around the", "to run the PSO heuristic over the objective function. Params: - fx: the", "representation of the bat. Return: - string: the string representation of the bat.", "best ant in the colony. - float: the cost of the best ant.", "number_ants)] for _ in range(0,self.number_params): self.points.append(PointsList(first_guess_linear(discrete_points))) ''' Method that returns the best ant", "of the ant. ''' def assign_point(self, point): self.current_localization.append(point) ''' Method that updates the", "Point(0,0) for point in self.points: if(point.get_pheromone() > best_point.get_pheromone()): best_point = point return best_point", "-float: the cost of the best point found by the colony. ''' def", "of points. ''' def get_list_points(self): return self.points ''' Method that evaporates the pheromones", "class. Params: - num_params: the number of dimentios of the objective function. -", "the point was added to the memory and False otherwise. ''' def set_memory(self,", "point in self.points: if(point.get_pheromone() > best_point.get_pheromone()): best_point = point return best_point ''' Method", "list of points. ''' def get_list_points(self): return self.points ''' Method that evaporates the", "with respect of the given function. ''' class ACO(): ''' The constructor of", "Params: - point: a coordinate. - pheromone: the pheromone that leads to the", "run the PSO heuristic over the objective function. 
Params: - fx: the cost", "== p.get_point()): return False self.memory.append(point) if( len(self.memory) > self.memory_limit ): del self.memory[0] return", "_ in range(discrete_points)] theta = [Point(uniform(0, pi),1/2) for _ in range(0,int(n/2))] + [Point(uniform(0,", "''' Method to get the coordinates of the point. Return: - point: The", "the coordinates of the ant location. Return: - list: the list of coordinates" ]
[ "= 0 while True: if self._devid not in TAPDevice._allocated_device_ids: break else: self._devid +=", "#!/usr/bin/env python3 import subprocess from threading import RLock class TAPDevice(object): \"\"\" This class", "TAPDevice(object): \"\"\" This class issues iproute2 commands to add and remove tap devices", "the tap device. After calling this function, subsequent calls to the objects should", "self._masterdevname = None subprocess.check_call([\"ip\", \"tuntap\", \"add\", \"name\", self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\",", "import RLock class TAPDevice(object): \"\"\" This class issues iproute2 commands to add and", "longer available\") return self._devname @property def master(self) -> str: if not self._active: raise", "calling this function, subsequent calls to the objects should not be made. \"\"\"", "self.free() raise def update_master(self, master: str): # This raises exception if master is", "= master @property def device(self) -> str: if not self._active: raise RuntimeError(\"Device is", "self._masterdevname = master @property def device(self) -> str: if not self._active: raise RuntimeError(\"Device", "raise RuntimeError(\"Device is no longer available\") return self._masterdevname def free(self): \"\"\" Free up", "def update_master(self, master: str): # This raises exception if master is not available", "commands to add and remove tap devices required for VM networking \"\"\" _allocated_device_ids", "= [] NAMING_SCHEME = \"tap{id}\" _global_network_lock = RLock() # protects the _allocated_device_ids list,", "subprocess.check_call([\"ip\", \"tuntap\", \"add\", \"name\", self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"up\"]) try:", "class TAPDevice(object): \"\"\" This class issues iproute2 commands to add and remove tap", "subprocess from threading import RLock class TAPDevice(object): \"\"\" This class issues iproute2 commands", "try: 
self.update_master(master) except subprocess.CalledProcessError: self.free() raise def update_master(self, master: str): # This raises", "tap device. After calling this function, subsequent calls to the objects should not", "no longer available\") return self._devname @property def master(self) -> str: if not self._active:", "not available if not self._active: raise RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock:", "not self._active: raise RuntimeError(\"Device is no longer available\") return self._devname @property def master(self)", "str): self._active = True with TAPDevice._global_network_lock: self._devid = 0 while True: if self._devid", "add and remove tap devices required for VM networking \"\"\" _allocated_device_ids = []", "while True: if self._devid not in TAPDevice._allocated_device_ids: break else: self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid)", "up the tap device. After calling this function, subsequent calls to the objects", "= RLock() # protects the _allocated_device_ids list, and the adding and removing of", "str: if not self._active: raise RuntimeError(\"Device is no longer available\") return self._masterdevname def", "\"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"up\"]) try: self.update_master(master) except subprocess.CalledProcessError: self.free() raise def", "\"link\", \"set\", self._devname, \"up\"]) try: self.update_master(master) except subprocess.CalledProcessError: self.free() raise def update_master(self, master:", "NAMING_SCHEME = \"tap{id}\" _global_network_lock = RLock() # protects the _allocated_device_ids list, and the", "subsequent calls to the objects should not be made. 
\"\"\" if not self._active:", "tap devices required for VM networking \"\"\" _allocated_device_ids = [] NAMING_SCHEME = \"tap{id}\"", "\"tuntap\", \"add\", \"name\", self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"up\"]) try: self.update_master(master)", "available\") return self._masterdevname def free(self): \"\"\" Free up the tap device. After calling", "\"link\", \"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\", \"del\", \"name\", self._devname, \"mode\", \"tap\"]) TAPDevice._allocated_device_ids.remove(self._devid) self._active", "with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"master\", master]) self._masterdevname = master @property def", "\"set\", self._devname, \"up\"]) try: self.update_master(master) except subprocess.CalledProcessError: self.free() raise def update_master(self, master: str):", "if not self._active: raise RuntimeError(\"Device is no longer available\") return self._masterdevname def free(self):", "self._active: raise RuntimeError(\"Device is no longer available\") return self._masterdevname def free(self): \"\"\" Free", "is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\",", "_global_network_lock = RLock() # protects the _allocated_device_ids list, and the adding and removing", "adding and removing of tap devices def __init__(self, master: str): self._active = True", "def master(self) -> str: if not self._active: raise RuntimeError(\"Device is no longer available\")", "-> str: if not self._active: raise RuntimeError(\"Device is no longer available\") return self._masterdevname", "is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"master\", master]) self._masterdevname", "master: str): # This raises 
exception if master is not available if not", "RLock() # protects the _allocated_device_ids list, and the adding and removing of tap", "return self._masterdevname def free(self): \"\"\" Free up the tap device. After calling this", "longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"master\", master]) self._masterdevname = master", "def free(self): \"\"\" Free up the tap device. After calling this function, subsequent", "\"tap{id}\" _global_network_lock = RLock() # protects the _allocated_device_ids list, and the adding and", "networking \"\"\" _allocated_device_ids = [] NAMING_SCHEME = \"tap{id}\" _global_network_lock = RLock() # protects", "\"\"\" Free up the tap device. After calling this function, subsequent calls to", "\"add\", \"name\", self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"up\"]) try: self.update_master(master) except", "list, and the adding and removing of tap devices def __init__(self, master: str):", "TAPDevice._global_network_lock: self._devid = 0 while True: if self._devid not in TAPDevice._allocated_device_ids: break else:", "True: if self._devid not in TAPDevice._allocated_device_ids: break else: self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname", "@property def device(self) -> str: if not self._active: raise RuntimeError(\"Device is no longer", "RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\",", "devices def __init__(self, master: str): self._active = True with TAPDevice._global_network_lock: self._devid = 0", "no longer available\") return self._masterdevname def free(self): \"\"\" Free up the tap device.", "self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) 
self._masterdevname = None subprocess.check_call([\"ip\", \"tuntap\", \"add\",", "devices required for VM networking \"\"\" _allocated_device_ids = [] NAMING_SCHEME = \"tap{id}\" _global_network_lock", "# protects the _allocated_device_ids list, and the adding and removing of tap devices", "def __init__(self, master: str): self._active = True with TAPDevice._global_network_lock: self._devid = 0 while", "master(self) -> str: if not self._active: raise RuntimeError(\"Device is no longer available\") return", "if not self._active: raise RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\",", "not in TAPDevice._allocated_device_ids: break else: self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname", "self._devname @property def master(self) -> str: if not self._active: raise RuntimeError(\"Device is no", "longer available\") return self._masterdevname def free(self): \"\"\" Free up the tap device. 
After", "\"\"\" if not self._active: raise RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\",", "str: if not self._active: raise RuntimeError(\"Device is no longer available\") return self._devname @property", "raise RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"down\"])", "self._active = True with TAPDevice._global_network_lock: self._devid = 0 while True: if self._devid not", "self._devid not in TAPDevice._allocated_device_ids: break else: self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid)", "return self._devname @property def master(self) -> str: if not self._active: raise RuntimeError(\"Device is", "self._devid = 0 while True: if self._devid not in TAPDevice._allocated_device_ids: break else: self._devid", "= None subprocess.check_call([\"ip\", \"tuntap\", \"add\", \"name\", self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\", self._devname,", "After calling this function, subsequent calls to the objects should not be made.", "\"link\", \"set\", self._devname, \"master\", master]) self._masterdevname = master @property def device(self) -> str:", "update_master(self, master: str): # This raises exception if master is not available if", "\"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"up\"]) try: self.update_master(master) except subprocess.CalledProcessError: self.free() raise", "self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname = None subprocess.check_call([\"ip\", \"tuntap\", \"add\", \"name\", self._devname, \"mode\", \"tap\"])", "made. 
\"\"\" if not self._active: raise RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock:", "\"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\", \"del\", \"name\", self._devname, \"mode\", \"tap\"]) TAPDevice._allocated_device_ids.remove(self._devid) self._active =", "exception if master is not available if not self._active: raise RuntimeError(\"Device is no", "is no longer available\") return self._masterdevname def free(self): \"\"\" Free up the tap", "self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\", \"del\", \"name\", self._devname, \"mode\", \"tap\"]) TAPDevice._allocated_device_ids.remove(self._devid) self._active = False", "self._active: raise RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname,", "issues iproute2 commands to add and remove tap devices required for VM networking", "self._active: raise RuntimeError(\"Device is no longer available\") return self._devname @property def master(self) ->", "+= 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname = None subprocess.check_call([\"ip\", \"tuntap\", \"add\", \"name\",", "str): # This raises exception if master is not available if not self._active:", "[] NAMING_SCHEME = \"tap{id}\" _global_network_lock = RLock() # protects the _allocated_device_ids list, and", "else: self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname = None subprocess.check_call([\"ip\", \"tuntap\",", "is not available if not self._active: raise RuntimeError(\"Device is no longer available\") with", "remove tap devices required for VM networking \"\"\" _allocated_device_ids = [] NAMING_SCHEME =", "This raises exception if master is not available if not self._active: raise 
RuntimeError(\"Device", "= \"tap{id}\" _global_network_lock = RLock() # protects the _allocated_device_ids list, and the adding", "with TAPDevice._global_network_lock: self._devid = 0 while True: if self._devid not in TAPDevice._allocated_device_ids: break", "longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\", \"del\", \"name\",", "except subprocess.CalledProcessError: self.free() raise def update_master(self, master: str): # This raises exception if", "required for VM networking \"\"\" _allocated_device_ids = [] NAMING_SCHEME = \"tap{id}\" _global_network_lock =", "and the adding and removing of tap devices def __init__(self, master: str): self._active", "master is not available if not self._active: raise RuntimeError(\"Device is no longer available\")", "TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"master\", master]) self._masterdevname = master @property def device(self)", "no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\", \"del\",", "tap devices def __init__(self, master: str): self._active = True with TAPDevice._global_network_lock: self._devid =", "break else: self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname = None subprocess.check_call([\"ip\",", "\"set\", self._devname, \"master\", master]) self._masterdevname = master @property def device(self) -> str: if", "available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"master\", master]) self._masterdevname = master @property", "from threading import RLock class TAPDevice(object): \"\"\" This class issues iproute2 commands to", "if self._devid not in 
TAPDevice._allocated_device_ids: break else: self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname =", "self._devname, \"master\", master]) self._masterdevname = master @property def device(self) -> str: if not", "\"up\"]) try: self.update_master(master) except subprocess.CalledProcessError: self.free() raise def update_master(self, master: str): # This", "= True with TAPDevice._global_network_lock: self._devid = 0 while True: if self._devid not in", "python3 import subprocess from threading import RLock class TAPDevice(object): \"\"\" This class issues", "TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname = None subprocess.check_call([\"ip\", \"tuntap\", \"add\", \"name\", self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\",", "available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\", \"del\", \"name\", self._devname,", "RuntimeError(\"Device is no longer available\") return self._masterdevname def free(self): \"\"\" Free up the", "if not self._active: raise RuntimeError(\"Device is no longer available\") return self._devname @property def", "class issues iproute2 commands to add and remove tap devices required for VM", "None subprocess.check_call([\"ip\", \"tuntap\", \"add\", \"name\", self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"up\"])", "and remove tap devices required for VM networking \"\"\" _allocated_device_ids = [] NAMING_SCHEME", "def device(self) -> str: if not self._active: raise RuntimeError(\"Device is no longer available\")", "\"\"\" This class issues iproute2 commands to add and remove tap devices required", "RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"master\", master])", "import subprocess from threading 
import RLock class TAPDevice(object): \"\"\" This class issues iproute2", "function, subsequent calls to the objects should not be made. \"\"\" if not", "for VM networking \"\"\" _allocated_device_ids = [] NAMING_SCHEME = \"tap{id}\" _global_network_lock = RLock()", "__init__(self, master: str): self._active = True with TAPDevice._global_network_lock: self._devid = 0 while True:", "@property def master(self) -> str: if not self._active: raise RuntimeError(\"Device is no longer", "VM networking \"\"\" _allocated_device_ids = [] NAMING_SCHEME = \"tap{id}\" _global_network_lock = RLock() #", "calls to the objects should not be made. \"\"\" if not self._active: raise", "if master is not available if not self._active: raise RuntimeError(\"Device is no longer", "the objects should not be made. \"\"\" if not self._active: raise RuntimeError(\"Device is", "device. After calling this function, subsequent calls to the objects should not be", "of tap devices def __init__(self, master: str): self._active = True with TAPDevice._global_network_lock: self._devid", "RLock class TAPDevice(object): \"\"\" This class issues iproute2 commands to add and remove", "the adding and removing of tap devices def __init__(self, master: str): self._active =", "subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"master\", master]) self._masterdevname = master @property def device(self) ->", "raises exception if master is not available if not self._active: raise RuntimeError(\"Device is", "protects the _allocated_device_ids list, and the adding and removing of tap devices def", "subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\", \"del\", \"name\", self._devname, \"mode\", \"tap\"]) TAPDevice._allocated_device_ids.remove(self._devid)", "TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname = None subprocess.check_call([\"ip\", 
\"tuntap\", \"add\", \"name\", self._devname, \"mode\",", "in TAPDevice._allocated_device_ids: break else: self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname =", "True with TAPDevice._global_network_lock: self._devid = 0 while True: if self._devid not in TAPDevice._allocated_device_ids:", "<filename>mmvmm/tap_device.py #!/usr/bin/env python3 import subprocess from threading import RLock class TAPDevice(object): \"\"\" This", "master @property def device(self) -> str: if not self._active: raise RuntimeError(\"Device is no", "be made. \"\"\" if not self._active: raise RuntimeError(\"Device is no longer available\") with", "self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"up\"]) try: self.update_master(master) except subprocess.CalledProcessError: self.free()", "subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"up\"]) try: self.update_master(master) except subprocess.CalledProcessError: self.free() raise def update_master(self,", "threading import RLock class TAPDevice(object): \"\"\" This class issues iproute2 commands to add", "subprocess.CalledProcessError: self.free() raise def update_master(self, master: str): # This raises exception if master", "not self._active: raise RuntimeError(\"Device is no longer available\") return self._masterdevname def free(self): \"\"\"", "raise RuntimeError(\"Device is no longer available\") return self._devname @property def master(self) -> str:", "0 while True: if self._devid not in TAPDevice._allocated_device_ids: break else: self._devid += 1", "no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"master\", master]) self._masterdevname =", "_allocated_device_ids list, and the adding and removing of tap devices def __init__(self, master:", "device(self) -> str: if not self._active: raise 
RuntimeError(\"Device is no longer available\") return", "removing of tap devices def __init__(self, master: str): self._active = True with TAPDevice._global_network_lock:", "# This raises exception if master is not available if not self._active: raise", "not self._active: raise RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\",", "= TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname = None subprocess.check_call([\"ip\", \"tuntap\", \"add\", \"name\", self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\",", "This class issues iproute2 commands to add and remove tap devices required for", "Free up the tap device. After calling this function, subsequent calls to the", "free(self): \"\"\" Free up the tap device. After calling this function, subsequent calls", "\"\"\" _allocated_device_ids = [] NAMING_SCHEME = \"tap{id}\" _global_network_lock = RLock() # protects the", "\"name\", self._devname, \"mode\", \"tap\"]) subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"up\"]) try: self.update_master(master) except subprocess.CalledProcessError:", "iproute2 commands to add and remove tap devices required for VM networking \"\"\"", "to add and remove tap devices required for VM networking \"\"\" _allocated_device_ids =", "RuntimeError(\"Device is no longer available\") return self._devname @property def master(self) -> str: if", "self._masterdevname def free(self): \"\"\" Free up the tap device. 
After calling this function,", "is no longer available\") return self._devname @property def master(self) -> str: if not", "-> str: if not self._active: raise RuntimeError(\"Device is no longer available\") return self._devname", "master: str): self._active = True with TAPDevice._global_network_lock: self._devid = 0 while True: if", "raise RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"master\",", "this function, subsequent calls to the objects should not be made. \"\"\" if", "\"master\", master]) self._masterdevname = master @property def device(self) -> str: if not self._active:", "master]) self._masterdevname = master @property def device(self) -> str: if not self._active: raise", "1 TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname = None subprocess.check_call([\"ip\", \"tuntap\", \"add\", \"name\", self._devname,", "TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\", \"del\", \"name\", self._devname, \"mode\", \"tap\"])", "available\") return self._devname @property def master(self) -> str: if not self._active: raise RuntimeError(\"Device", "not be made. \"\"\" if not self._active: raise RuntimeError(\"Device is no longer available\")", "objects should not be made. 
\"\"\" if not self._active: raise RuntimeError(\"Device is no", "with TAPDevice._global_network_lock: subprocess.check_call([\"ip\", \"link\", \"set\", self._devname, \"down\"]) subprocess.check_call([\"ip\", \"tuntap\", \"del\", \"name\", self._devname, \"mode\",", "self.update_master(master) except subprocess.CalledProcessError: self.free() raise def update_master(self, master: str): # This raises exception", "available if not self._active: raise RuntimeError(\"Device is no longer available\") with TAPDevice._global_network_lock: subprocess.check_call([\"ip\",", "_allocated_device_ids = [] NAMING_SCHEME = \"tap{id}\" _global_network_lock = RLock() # protects the _allocated_device_ids", "self._devname, \"up\"]) try: self.update_master(master) except subprocess.CalledProcessError: self.free() raise def update_master(self, master: str): #", "raise def update_master(self, master: str): # This raises exception if master is not", "and removing of tap devices def __init__(self, master: str): self._active = True with", "should not be made. \"\"\" if not self._active: raise RuntimeError(\"Device is no longer", "TAPDevice._allocated_device_ids: break else: self._devid += 1 TAPDevice._allocated_device_ids.append(self._devid) self._devname = TAPDevice.NAMING_SCHEME.format(id=self._devid) self._masterdevname = None", "to the objects should not be made. \"\"\" if not self._active: raise RuntimeError(\"Device", "the _allocated_device_ids list, and the adding and removing of tap devices def __init__(self," ]
[ "True def to_numpy(*tensors: T) -> ARRAYS: params = [param.detach().cpu().numpy() if type(param) is T", "for param in tensors] return params def create_mapper(mask: T) -> T: mapper =", "[1, 2, 3], [4, 5, 6], [5, 7, 6], [0, 1, 5], [0,", "vs alpha = compute_face_areas(triangles)[0] / areas triangles[:, ind] = recover[:, ind] return alpha", "*features: T) -> Union[T, TS]: samples, face_ids, uvw = sample_on_mesh(mesh, num_samples, sample_s=sample_s) if", "face_areas: TN = None, sample_s: SampleBy = SampleBy.HYB) -> TNS: vs, faces =", "in_place: bool = True) -> T_Mesh: vs, faces = mesh if not in_place:", "vs.clone() vs = to_center(vs) norm = vs.norm(2, dim=1).max() vs *= scale * norm", "barycentric = torch.stack(barycentric, dim=1) # assert barycentric.sum(1).max().item() <= 1 + EPSILON return barycentric.to(device,", "gaussian_curvature(mesh: T_Mesh) -> T: gc = igl.gaussian_curvature(*mesh) return gc @igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh,", "-> ARRAYS: params = [param.detach().cpu().numpy() if type(param) is T else param for param", "vs *= scale return vs, faces def to_unit_cube(*meshes: T_Mesh_T, scale=1, in_place: bool =", "values def get_faces_normals(mesh: Union[T_Mesh, T]) -> T: if type(mesh) is not T: vs,", "float, in_place: bool = True) -> T_Mesh: vs, faces = mesh if not", "uvw is None: fe_iner = fe[face_ids] else: vs_ids = mesh[1][face_ids] fe_unrolled = fe[vs_ids]", "get_edges_ind(mesh: T_Mesh) -> T: vs, faces = mesh raw_edges = torch.cat([faces[:, [i, (i", "5], [0, 4, 2], [2, 4, 6]] return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def", "min_vals).max() / 2 center = (max_vals + min_vals) / 2 meshes_ = []", "get_samples(mesh: T_Mesh, num_samples: int, sample_s: SampleBy, *features: T) -> Union[T, TS]: samples, face_ids,", "in_place: bool = True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T, float]]: remove_me = 0", "mesh[1][face_ids] fe_unrolled = fe[vs_ids] fe_iner = 
torch.einsum('sad,sa->sd', fe_unrolled, uvw) # if to_squeeze: #", "= torch.einsum('fad,fna->fnd', vs[faces], uvw) return samples, uvw class SampleBy(Enum): AREAS = 0 FACES", "mesh_center(mesh: T_Mesh): return mesh[0].mean(0) def to_center(vs): max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] center", "2, 7], [1, 3, 5], [3, 7, 5], [0, 4, 2], [2, 4,", "= weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd', triangles[face_ids], weights) if to_squeeze: vs", "mask = (u + v).gt(1) u[mask], v[mask] = -u[mask] + 1, -v[mask] +", "for val in values] if len(values) == 1: return values[0] return values def", "T, weights: T) -> T: if type(mesh) is not T: triangles: T =", "def sample_on_mesh(mesh: T_Mesh, num_samples: int, face_areas: TN = None, sample_s: SampleBy = SampleBy.HYB)", "vs.device) samples = torch.einsum('fad,fna->fnd', vs[faces], uvw) return samples, uvw class SampleBy(Enum): AREAS =", "is None: face_areas, _ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas / face_areas.sum()) if", "1 uvw = torch.stack([u, v, w], dim=len(shape)) return uvw def get_sampled_fe(fe: T, mesh:", "raw_edges = raw_edges[0].cpu().numpy() edges = {(int(edge[0]), int(edge[1])) for edge in raw_edges} edges =", "check_circle_angles(mesh: T_Mesh, center_ind: int, select: T) -> bool: vs, _ = mesh all_vecs", "out = igl.principal_curvature(*mesh) min_dir, max_dir, min_val, max_val = out return min_dir, max_dir, min_val,", "vs def check_circle_angles(mesh: T_Mesh, center_ind: int, select: T) -> bool: vs, _ =", "uvw) for fe in features] return samples, face_ids, uvw def find_barycentric(vs: T, triangles:", "[w, 0, h], [0, d, h], [w, d, h]] faces = [[0, 2,", "sample_s: SampleBy = SampleBy.HYB) -> TNS: vs, faces = mesh if faces is", "dtype=dtypes[0]) def to_torch_multi(result, device): return [torch.from_numpy(r).to(device, dtype=dtype) for r, dtype in zip(result, dtypes)]", "None: _, normals = compute_face_areas(triangle) 
select = torch.arange(3) d_vs = vs_mid[:, None, :]", "vs[faces] else: vs_faces = mesh if vs_faces.shape[-1] == 2: vs_faces = torch.cat( (vs_faces,", "mesh[0][mesh[1]] else: triangles: T = mesh to_squeeze = weights.dim() == 1 if to_squeeze:", "-> Union[T, TS]: samples, face_ids, uvw = sample_on_mesh(mesh, num_samples, sample_s=sample_s) if len(features) >", "= torch.from_numpy(winding_numbers) inside_outside = winding_numbers.lt(.5).float() * 2 - 1 return inside_outside.to(device) @igl_prepare(torch.float32) def", "1: meshes_ = meshes_[0] return meshes_, (center, scale) def get_edges_ind(mesh: T_Mesh) -> T:", "max_val = max([val.max().item() for val in values]) min_val = min([val.min().item() for val in", "wrapper(*args, **kwargs): mesh = args[0] device, dtype = mesh[0].device, mesh[0].dtype vs, faces =", "= mesh if not in_place: vs = vs.clone() vs -= center[None, :] vs", "Union[T, TS]: samples, face_ids, uvw = sample_on_mesh(mesh, num_samples, sample_s=sample_s) if len(features) > 0:", "= 1 face_normals = face_normals / face_areas_[:, None] face_areas = 0.5 * face_areas", "scale = float(scale / max_range) for mesh in meshes: vs_, faces_ = scale_from_ref(mesh,", "in place def to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T, float]]: ref =", "torch.cat([faces[:, [i, (i + 1) % 3]] for i in range(3)]).sort() raw_edges =", ":].to(vs.device) vs /= ratio if len(meshes) == 1: meshes = meshes[0] return meshes,", "np.pi).abs() < EPSILON def vs_over_triangle(vs_mid: T, triangle: T, normals=None) -> T: if vs_mid.dim()", "return values def get_faces_normals(mesh: Union[T_Mesh, T]) -> T: if type(mesh) is not T:", "mesh: T_Mesh, face_ids: T, uvw: TN) -> T: # to_squeeze = if fe.dim()", "out.append(clone(*t)) return out def get_box(w: float, h: float, d: float) -> T_Mesh: vs", "vs.to(device, dtype=torch.float64) triangles = triangles.to(device, dtype=torch.float64) areas, _ = compute_face_areas(triangles) recover = triangles.clone()", "vs, faces 
def scale_from_ref(mesh: T_Mesh, center: T, scale: float, in_place: bool = True)", "in tensors: if type(t) is T: out.append(t.clone()) else: out.append(clone(*t)) return out def get_box(w:", "min_val = min([val.min().item() for val in values]) scale = max_val - min_val values", "T_Mesh, edges_ind: TN = None) -> T: vs, faces = mesh if edges_ind", "face_areas_[:, None] face_areas = 0.5 * face_areas return face_areas, face_normals def check_sign_area(*meshes: T_Mesh)", "= sample_uvw([num_samples], vs.device) samples = torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return samples, chosen_faces_inds, uvw def", "return normals @igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) -> T_Mesh: vs, _, _,", "uvw = sample_on_mesh(mesh, num_samples, sample_s=sample_s) if len(features) > 0: samples = [samples] +", "D): u, v = torch.rand(*shape, device=device), torch.rand(*shape, device=device) mask = (u + v).gt(1)", "/ 2 meshes_ = [] scale = float(scale / max_range) for mesh in", "/ face_areas.sum()) if sample_s == SampleBy.FACES or sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds", "t.norm(2, dim=1)[:, None] return t def interpolate_vs(mesh: T_Mesh, faces_inds: T, weights: T) ->", "vs = vs.to(device, dtype=torch.float64) triangles = triangles.to(device, dtype=torch.float64) areas, _ = compute_face_areas(triangles) recover", "i in range(3)] barycentric = torch.stack(barycentric, dim=1) # assert barycentric.sum(1).max().item() <= 1 +", "get_edges_ind(mesh) edges = vs[edges_ind] return torch.norm(edges[:, 0] - edges[:, 1], 2, dim=1) #", "return alpha device, dtype = vs.device, vs.dtype vs = vs.to(device, dtype=torch.float64) triangles =", "= vs.norm(2, dim=1).max() vs *= scale * norm ** -1 return vs, faces", "tuple or type(tensors) is List: out.append(to(list(tensor), device)) else: out.append(tensor) if len(tensors) == 1:", "T): t = t / t.norm(2, dim=1)[:, None] 
return t def interpolate_vs(mesh: T_Mesh,", "max([val.max().item() for val in values]) min_val = min([val.min().item() for val in values]) scale", "fe_unrolled = fe[vs_ids] fe_iner = torch.einsum('sad,sa->sd', fe_unrolled, uvw) # if to_squeeze: # fe_iner", "return meshes, (center, ratio) def to(tensors, device: D) -> Union[T_Mesh, TS, T]: out", "0: samples = [samples] + [get_sampled_fe(fe, mesh, face_ids, uvw) for fe in features]", "mesh in meshes: vs_, faces_ = scale_from_ref(mesh, center, scale) meshes_.append(vs_ if faces_ is", "d, 0], [w, d, 0], [0, 0, h], [w, 0, h], [0, d,", "min_vals = vs.min(0)[0] max_range = (max_vals - min_vals).max() / 2 center = (max_vals", "Tuple[T_Mesh, ...]], Tuple[T, float]]: ref = meshes[0] center = ref[0].mean(0) ratio = edge_lengths(ref).mean().item()", "device=vs_faces.device)), dim=2) face_normals = torch.cross(vs_faces[:, 1, :] - vs_faces[:, 0, :], vs_faces[:, 2,", "torch.einsum('fad,fna->fnd', vs[faces], uvw) return samples, uvw class SampleBy(Enum): AREAS = 0 FACES =", "num_samples: int, face_areas: TN = None, sample_s: SampleBy = SampleBy.HYB) -> TNS: vs,", "check, uv = igl.lscm(*mesh, boundary_indices, boundary_coordinates) return uv def interpulate_vs(mesh: T_Mesh, faces_inds: T,", "vs def to_unit_sphere(mesh: T_Mesh, in_place: bool = True, scale=1.) 
-> T_Mesh: vs, faces", "1 return inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh: T_Mesh, boundary_indices: T, boundary_coordinates: T) -> T:", "torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals = torch.cross(vs_faces[:, 1, :] - vs_faces[:, 0,", "2 vs -= center[None, :] return vs def to_unit_sphere(mesh: T_Mesh, in_place: bool =", "7], [1, 3, 5], [3, 7, 5], [0, 4, 2], [2, 4, 6]]", "0], [0, d, 0], [w, d, 0], [0, 0, h], [w, 0, h],", "0, 0], [w, 0, 0], [0, d, 0], [w, d, 0], [0, 0,", "float, h: float, d: float) -> T_Mesh: vs = [[0, 0, 0], [w,", "1)[:, None] all_vecs = torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos = torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles", "winding_number_igl(mesh: T_Mesh, query: T) -> T: query = query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32,", "return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def normalize(t: T): t = t / t.norm(2,", "vs, faces = mesh if faces is None: # sample from pc uvw", "= sample_uvw([faces.shape[0], num_samples], vs.device) samples = torch.einsum('fad,fna->fnd', vs[faces], uvw) return samples, uvw class", "custom_types import * from constants import EPSILON import igl def scale_all(*values: T): max_val", "meshes_[0] return meshes_, (center, scale) def get_edges_ind(mesh: T_Mesh) -> T: vs, faces =", "mesh raw_edges = torch.cat([faces[:, [i, (i + 1) % 3]] for i in", "def to_center(vs): max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] center = (max_vals + min_vals)", "def sample_on_faces(mesh: T_Mesh, num_samples: int) -> TS: vs, faces = mesh uvw =", "uvw) return samples, uvw class SampleBy(Enum): AREAS = 0 FACES = 1 HYB", "= get_edges_ind(mesh) edges = vs[edges_ind] return torch.norm(edges[:, 0] - edges[:, 1], 2, dim=1)", "def vs_over_triangle(vs_mid: T, triangle: T, normals=None) -> T: if vs_mid.dim() == 1: vs_mid", "2 - 1 return 
inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh: T_Mesh, boundary_indices: T, boundary_coordinates: T)", "recover = triangles.clone() barycentric = [compute_barycentric(i) for i in range(3)] barycentric = torch.stack(barycentric,", "/ max_range) for mesh in meshes: vs_, faces_ = scale_from_ref(mesh, center, scale) meshes_.append(vs_", "T else mesh for mesh in meshes] vs, faces = meshes[0] max_vals =", "device = points.device points = points.numpy() vs, faces = mesh[0].numpy(), mesh[1].numpy() winding_numbers =", "= 0) -> T: normals = igl.per_vertex_normals(*mesh, weighting) return normals @igl_prepare(torch.float32, torch.int64) def", "T: vs = mesh[0][mesh[1][faces_inds]] vs = vs * weights[:, :, None] return vs.sum(1)", "TN = None) -> T: vs, faces = mesh if edges_ind is None:", "return vs.sum(1) def sample_uvw(shape, device: D): u, v = torch.rand(*shape, device=device), torch.rand(*shape, device=device)", "return barycentric.to(device, dtype=dtype) def from_barycentric(mesh: Union[T_Mesh, T], face_ids: T, weights: T) -> T:", "wrapper def to_torch_singe(result, device): return torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result, device): return [torch.from_numpy(r).to(device, dtype=dtype)", "faces = mesh if not in_place: vs = vs.clone() vs -= center[None, :]", "max_val = out return min_dir, max_dir, min_val, max_val def get_inside_outside(points: T, mesh: T_Mesh)", "1: return out[0] else: return tuple(out) def clone(*tensors: Union[T, TS]) -> Union[TS, T_Mesh]:", "-> T: vs, faces = mesh raw_edges = torch.cat([faces[:, [i, (i + 1)", "= float(scale / max_range) for mesh in meshes: vs_, faces_ = scale_from_ref(mesh, center,", "else (vs_, faces_)) if len(meshes_) == 1: meshes_ = meshes_[0] return meshes_, (center,", "mesh for mesh in meshes] vs, faces = meshes[0] max_vals = vs.max(0)[0] min_vals", "= None, sample_s: SampleBy = SampleBy.HYB) -> TNS: vs, faces = mesh if", "normals is None: _, normals = 
compute_face_areas(triangle) select = torch.arange(3) d_vs = vs_mid[:,", "to_center(vs): max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] center = (max_vals + min_vals) /", "faces = mesh if not in_place: vs = vs.clone() vs = to_center(vs) norm", "faces def to_unit_cube(*meshes: T_Mesh_T, scale=1, in_place: bool = True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]],", "return t def interpolate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs =", "max_range = (max_vals - min_vals).max() / 2 center = (max_vals + min_vals) /", "else: return tuple(out) def clone(*tensors: Union[T, TS]) -> Union[TS, T_Mesh]: out = []", ":] - vs_faces[:, 1, :]) return face_normals def compute_face_areas(mesh: Union[T_Mesh, T]) -> TS:", "class SampleBy(Enum): AREAS = 0 FACES = 1 HYB = 2 def sample_on_mesh(mesh:", "= mesh to_squeeze = weights.dim() == 1 if to_squeeze: weights = weights.unsqueeze(0) face_ids", "FACES = 1 HYB = 2 def sample_on_mesh(mesh: T_Mesh, num_samples: int, face_areas: TN", "SampleBy(Enum): AREAS = 0 FACES = 1 HYB = 2 def sample_on_mesh(mesh: T_Mesh,", "T]) -> TS: face_normals = get_faces_normals(mesh) face_areas = torch.norm(face_normals, p=2, dim=1) face_areas_ =", "def wrapper(*args, **kwargs): mesh = args[0] device, dtype = mesh[0].device, mesh[0].dtype vs, faces", "1, :] - vs_faces[:, 0, :], vs_faces[:, 2, :] - vs_faces[:, 1, :])", "sample_s == SampleBy.HYB: chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0) chosen_faces = faces[chosen_faces_inds] uvw = sample_uvw([num_samples],", "dim=2) all_dots = torch.einsum('nd,nad->na', normals, all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3) return is_over def igl_prepare(*dtypes):", "= mesh if vs_faces.shape[-1] == 2: vs_faces = torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype,", "face_ids = face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd', triangles[face_ids], weights) if to_squeeze: vs = vs.squeeze(0)", "(max_vals - min_vals).max() / 2 center = (max_vals + min_vals) / 2 
meshes_", "TS: vs, faces = mesh uvw = sample_uvw([faces.shape[0], num_samples], vs.device) samples = torch.einsum('fad,fna->fnd',", "-= center[None, :] return vs def to_unit_sphere(mesh: T_Mesh, in_place: bool = True, scale=1.)", "get_faces_normals(mesh) if not face_normals[:, 2].gt(0).all(): return False return True def to_numpy(*tensors: T) ->", "all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3) return is_over def igl_prepare(*dtypes): def decoder(func): def wrapper(*args, **kwargs):", "4, 2], [2, 4, 6]] return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def normalize(t: T):", "faces_)) if len(meshes_) == 1: meshes_ = meshes_[0] return meshes_, (center, scale) def", "float]]: ref = meshes[0] center = ref[0].mean(0) ratio = edge_lengths(ref).mean().item() for mesh in", "weighted_p.append(face_areas / face_areas.sum()) if sample_s == SampleBy.FACES or sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device))", "= (max_vals + min_vals) / 2 meshes_ = [] scale = float(scale /", "return vs def to_unit_sphere(mesh: T_Mesh, in_place: bool = True, scale=1.) 
-> T_Mesh: vs,", "= t / t.norm(2, dim=1)[:, None] return t def interpolate_vs(mesh: T_Mesh, faces_inds: T,", "def to_unit_cube(*meshes: T_Mesh_T, scale=1, in_place: bool = True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T,", "scale return vs, faces def to_unit_cube(*meshes: T_Mesh_T, scale=1, in_place: bool = True) ->", "== SampleBy.AREAS or sample_s == SampleBy.HYB: if face_areas is None: face_areas, _ =", "0, h], [w, 0, h], [0, d, h], [w, d, h]] faces =", "is None: _, normals = compute_face_areas(triangle) select = torch.arange(3) d_vs = vs_mid[:, None,", "T_Mesh) -> TS: out = igl.principal_curvature(*mesh) min_dir, max_dir, min_val, max_val = out return", "int): if mesh[1].shape[0] <= num_faces: return mesh vs, faces, _ = igl.remove_duplicates(*mesh, 1e-8)", "vs.sum(1) def sample_uvw(shape, device: D): u, v = torch.rand(*shape, device=device), torch.rand(*shape, device=device) mask", "mesh = args[0] device, dtype = mesh[0].device, mesh[0].dtype vs, faces = to_numpy(*mesh) result", "len(meshes) == 1: meshes = meshes[0] return meshes, (center, ratio) def to(tensors, device:", "if face_areas is None: face_areas, _ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas /", "0, h], [0, d, h], [w, d, h]] faces = [[0, 2, 1],", "= mesh[0].numpy(), mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers = torch.from_numpy(winding_numbers) inside_outside =", "1 else to_torch_multi return wrapper def to_torch_singe(result, device): return torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result,", "< num_samples: chosen_faces_inds = torch.arange(vs.shape[0]) else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds] else:", "is T: out.append(tensor.to(device, )) elif type(tensor) is tuple or type(tensors) is List: out.append(to(list(tensor),", "uvw def find_barycentric(vs: T, triangles: T) -> T: def 
compute_barycentric(ind): triangles[:, ind] =", "= vs.squeeze(0) return vs def check_circle_angles(mesh: T_Mesh, center_ind: int, select: T) -> bool:", "vs.dtype vs = vs.to(device, dtype=torch.float64) triangles = triangles.to(device, dtype=torch.float64) areas, _ = compute_face_areas(triangles)", "3]] for i in range(3)]).sort() raw_edges = raw_edges[0].cpu().numpy() edges = {(int(edge[0]), int(edge[1])) for", "uvw class SampleBy(Enum): AREAS = 0 FACES = 1 HYB = 2 def", "dim=0) all_cos = torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles = torch.acos_(all_cos) all_angles = all_angles.sum() return", "if not in_place: vs = vs.clone() vs = to_center(vs) norm = vs.norm(2, dim=1).max()", "= triangles.clone() barycentric = [compute_barycentric(i) for i in range(3)] barycentric = torch.stack(barycentric, dim=1)", "vs = mesh[0][mesh[1][faces_inds]] vs = vs * weights[:, :, None] return vs.sum(1) def", "= None if vs.shape[0] < num_samples: chosen_faces_inds = torch.arange(vs.shape[0]) else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples]", "T: mapper = torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) - 1 mapper[mask] = torch.arange(mask.sum().item(), device=mask.device) return", "# if to_squeeze: # fe_iner = fe_iner.squeeze_(1) return fe_iner def sample_on_faces(mesh: T_Mesh, num_samples:", "7], [3, 2, 7], [1, 3, 5], [3, 7, 5], [0, 4, 2],", "= [[0, 2, 1], [1, 2, 3], [4, 5, 6], [5, 7, 6],", "meshes_, (center, scale) def get_edges_ind(mesh: T_Mesh) -> T: vs, faces = mesh raw_edges", "= 1 HYB = 2 def sample_on_mesh(mesh: T_Mesh, num_samples: int, face_areas: TN =", ":], vs_faces[:, 2, :] - vs_faces[:, 1, :]) return face_normals def compute_face_areas(mesh: Union[T_Mesh,", "= 0 weighted_p.append(face_areas / face_areas.sum()) if sample_s == SampleBy.FACES or sample_s == SampleBy.HYB:", "faces = to_numpy(*mesh) result = func((vs, faces), *args[1:], **kwargs) return to_torch(result, device) to_torch", "faces is None: # sample 
from pc uvw = None if vs.shape[0] <", "mesh in meshes: vs, _ = mesh vs -= center[None, :].to(vs.device) vs /=", "-> T: mapper = torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) - 1 mapper[mask] = torch.arange(mask.sum().item(), device=mask.device)", "- edges[:, 1], 2, dim=1) # in place def to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh,", "# to_squeeze = if fe.dim() == 1: fe = fe.unsqueeze(1) if uvw is", "triangles: T = mesh to_squeeze = weights.dim() == 1 if to_squeeze: weights =", "areas triangles[:, ind] = recover[:, ind] return alpha device, dtype = vs.device, vs.dtype", "all_dots.ge(0).long().sum(1).eq(3) return is_over def igl_prepare(*dtypes): def decoder(func): def wrapper(*args, **kwargs): mesh = args[0]", "= [(val - min_val) / scale for val in values] if len(values) ==", "to_squeeze = weights.dim() == 1 if to_squeeze: weights = weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0)", "None: face_areas, _ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas / face_areas.sum()) if sample_s", "uv = igl.lscm(*mesh, boundary_indices, boundary_coordinates) return uv def interpulate_vs(mesh: T_Mesh, faces_inds: T, weights:", "u[mask], v[mask] = -u[mask] + 1, -v[mask] + 1 w = -u -", "torch.float32, torch.float32) def principal_curvature(mesh: T_Mesh) -> TS: out = igl.principal_curvature(*mesh) min_dir, max_dir, min_val,", "gc @igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh, weighting: int = 0) -> T: normals =", "= mesh vs_faces = vs[faces] else: vs_faces = mesh if vs_faces.shape[-1] == 2:", "= all_angles.sum() return (all_angles - 2 * np.pi).abs() < EPSILON def vs_over_triangle(vs_mid: T,", "ref[0].mean(0) ratio = edge_lengths(ref).mean().item() for mesh in meshes: vs, _ = mesh vs", "vs_mid.dim() == 1: vs_mid = vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0) if normals is None:", "3, 5], [3, 7, 5], [0, 4, 2], [2, 4, 6]] return torch.tensor(vs,", "1 w = -u - v + 1 uvw = 
torch.stack([u, v, w],", "in tensors: if type(tensor) is T: out.append(tensor.to(device, )) elif type(tensor) is tuple or", "= torch.einsum('sad,sa->sd', fe_unrolled, uvw) # if to_squeeze: # fe_iner = fe_iner.squeeze_(1) return fe_iner", "Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T, float]]: ref = meshes[0] center = ref[0].mean(0) ratio =", "or type(tensors) is List: out.append(to(list(tensor), device)) else: out.append(tensor) if len(tensors) == 1: return", "vs, faces, _ = igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32) def gaussian_curvature(mesh:", "u, v = torch.rand(*shape, device=device), torch.rand(*shape, device=device) mask = (u + v).gt(1) u[mask],", "is T: out.append(t.clone()) else: out.append(clone(*t)) return out def get_box(w: float, h: float, d:", "mapper def mesh_center(mesh: T_Mesh): return mesh[0].mean(0) def to_center(vs): max_vals = vs.max(0)[0] min_vals =", "dim=len(shape)) return uvw def get_sampled_fe(fe: T, mesh: T_Mesh, face_ids: T, uvw: TN) ->", "decimate_igl(mesh, num_faces: int): if mesh[1].shape[0] <= num_faces: return mesh vs, faces, _ =", "= fe[vs_ids] fe_iner = torch.einsum('sad,sa->sd', fe_unrolled, uvw) # if to_squeeze: # fe_iner =", "to(tensors, device: D) -> Union[T_Mesh, TS, T]: out = [] for tensor in", "> 0: samples = [samples] + [get_sampled_fe(fe, mesh, face_ids, uvw) for fe in", "edges = torch.tensor(list(edges), dtype=torch.int64, device=faces.device) return edges def edge_lengths(mesh: T_Mesh, edges_ind: TN =", "[4, 5, 6], [5, 7, 6], [0, 1, 5], [0, 5, 4], [2,", "= face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd', triangles[face_ids], weights) if to_squeeze: vs = vs.squeeze(0) return", "= mesh[0][mesh[1]] else: triangles: T = mesh to_squeeze = weights.dim() == 1 if", "def compute_barycentric(ind): triangles[:, ind] = vs alpha = compute_face_areas(triangles)[0] / areas triangles[:, ind]", "= meshes[0] return meshes, (center, ratio) def to(tensors, device: 
D) -> Union[T_Mesh, TS,", "(max_vals + min_vals) / 2 meshes_ = [] scale = float(scale / max_range)", "def get_box(w: float, h: float, d: float) -> T_Mesh: vs = [[0, 0,", "= True) -> T_Mesh: vs, faces = mesh if not in_place: vs =", "# fe_iner = fe_iner.squeeze_(1) return fe_iner def sample_on_faces(mesh: T_Mesh, num_samples: int) -> TS:", "int) -> TS: vs, faces = mesh uvw = sample_uvw([faces.shape[0], num_samples], vs.device) samples", "compute_face_areas(triangle) select = torch.arange(3) d_vs = vs_mid[:, None, :] - triangle d_f =", "edges_ind: TN = None) -> T: vs, faces = mesh if edges_ind is", "= fe.unsqueeze(1) if uvw is None: fe_iner = fe[face_ids] else: vs_ids = mesh[1][face_ids]", "fe_iner = torch.einsum('sad,sa->sd', fe_unrolled, uvw) # if to_squeeze: # fe_iner = fe_iner.squeeze_(1) return", "in tensors] return params def create_mapper(mask: T) -> T: mapper = torch.zeros(mask.shape[0], dtype=torch.int64,", "@igl_prepare(torch.float32) def lscm(mesh: T_Mesh, boundary_indices: T, boundary_coordinates: T) -> T: boundary_indices, boundary_coordinates =", "= mesh raw_edges = torch.cat([faces[:, [i, (i + 1) % 3]] for i", "faces[chosen_faces_inds] uvw = sample_uvw([num_samples], vs.device) samples = torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return samples, chosen_faces_inds,", "= query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32) def principal_curvature(mesh: T_Mesh) ->", "ratio if len(meshes) == 1: meshes = meshes[0] return meshes, (center, ratio) def", "(vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals = torch.cross(vs_faces[:, 1, :] - vs_faces[:,", "vs = vs.squeeze(0) return vs def check_circle_angles(mesh: T_Mesh, center_ind: int, select: T) ->", "face_areas is None: face_areas, _ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas / 
face_areas.sum())", "// len(weighted_p), replacement=True) for weights in weighted_p] if sample_s == SampleBy.HYB: chosen_faces_inds =", "meshes: vs, _ = mesh vs -= center[None, :].to(vs.device) vs /= ratio if", "vs -= center[None, :] vs *= scale return vs, faces def to_unit_cube(*meshes: T_Mesh_T,", ":] vs *= scale return vs, faces def to_unit_cube(*meshes: T_Mesh_T, scale=1, in_place: bool", "raw_edges = torch.cat([faces[:, [i, (i + 1) % 3]] for i in range(3)]).sort()", "uvw def get_samples(mesh: T_Mesh, num_samples: int, sample_s: SampleBy, *features: T) -> Union[T, TS]:", "max_val - min_val values = [(val - min_val) / scale for val in", "device, dtype = vs.device, vs.dtype vs = vs.to(device, dtype=torch.float64) triangles = triangles.to(device, dtype=torch.float64)", "in_place: bool = True, scale=1.) -> T_Mesh: vs, faces = mesh if not", "not in_place: vs = vs.clone() vs = to_center(vs) norm = vs.norm(2, dim=1).max() vs", "not T: triangles: T = mesh[0][mesh[1]] else: triangles: T = mesh to_squeeze =", "float]]: remove_me = 0 meshes = [(mesh, remove_me) if type(mesh) is T else", "0.5 * face_areas return face_areas, face_normals def check_sign_area(*meshes: T_Mesh) -> bool: for mesh", "[samples] + [get_sampled_fe(fe, mesh, face_ids, uvw) for fe in features] return samples, face_ids,", "== SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds = [torch.multinomial(weights, num_samples // len(weighted_p), replacement=True) for weights", "if to_squeeze: weights = weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd', triangles[face_ids], weights)", "if vs_mid.dim() == 1: vs_mid = vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0) if normals is", "else mesh for mesh in meshes] vs, faces = meshes[0] max_vals = vs.max(0)[0]", "def compute_face_areas(mesh: Union[T_Mesh, T]) -> TS: face_normals = get_faces_normals(mesh) face_areas = torch.norm(face_normals, p=2,", "== SampleBy.HYB: 
chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0) chosen_faces = faces[chosen_faces_inds] uvw = sample_uvw([num_samples], vs.device)", "inside_outside = winding_numbers.lt(.5).float() * 2 - 1 return inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh: T_Mesh,", "else: vs_faces = mesh if vs_faces.shape[-1] == 2: vs_faces = torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2],", "= (u + v).gt(1) u[mask], v[mask] = -u[mask] + 1, -v[mask] + 1", "2: vs_faces = torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals = torch.cross(vs_faces[:,", "return gc @igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh, weighting: int = 0) -> T: normals", "= mesh[0][mesh[1][faces_inds]] vs = vs * weights[:, :, None] return vs.sum(1) def sample_uvw(shape,", "faces = [[0, 2, 1], [1, 2, 3], [4, 5, 6], [5, 7,", "= torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals = torch.cross(vs_faces[:, 1, :]", "h]] faces = [[0, 2, 1], [1, 2, 3], [4, 5, 6], [5,", "meshes_ = meshes_[0] return meshes_, (center, scale) def get_edges_ind(mesh: T_Mesh) -> T: vs,", "1) % 3]] for i in range(3)]).sort() raw_edges = raw_edges[0].cpu().numpy() edges = {(int(edge[0]),", "interpolate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs = mesh[0][mesh[1][faces_inds]] vs =", "def sample_uvw(shape, device: D): u, v = torch.rand(*shape, device=device), torch.rand(*shape, device=device) mask =", "return samples, chosen_faces_inds, uvw def get_samples(mesh: T_Mesh, num_samples: int, sample_s: SampleBy, *features: T)", "else: out.append(tensor) if len(tensors) == 1: return out[0] else: return tuple(out) def clone(*tensors:", "2 meshes_ = [] scale = float(scale / max_range) for mesh in meshes:", "return out def get_box(w: float, h: float, d: float) -> T_Mesh: vs =", "= True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T, float]]: 
remove_me = 0 meshes =", "/ scale for val in values] if len(values) == 1: return values[0] return", "remove_me else (vs_, faces_)) if len(meshes_) == 1: meshes_ = meshes_[0] return meshes_,", "boundary_coordinates: T) -> T: boundary_indices, boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy() check, uv = igl.lscm(*mesh,", "= vs[chosen_faces_inds] else: weighted_p = [] if sample_s == SampleBy.AREAS or sample_s ==", "T = mesh[0][mesh[1]] else: triangles: T = mesh to_squeeze = weights.dim() == 1", "T: query = query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32) def principal_curvature(mesh:", "= [compute_barycentric(i) for i in range(3)] barycentric = torch.stack(barycentric, dim=1) # assert barycentric.sum(1).max().item()", "zip(result, dtypes)] return decoder @igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh, num_faces: int): if mesh[1].shape[0] <=", "tensor in tensors: if type(tensor) is T: out.append(tensor.to(device, )) elif type(tensor) is tuple", "5, 4], [2, 6, 7], [3, 2, 7], [1, 3, 5], [3, 7,", "in features] return samples, face_ids, uvw def find_barycentric(vs: T, triangles: T) -> T:", "device: D): u, v = torch.rand(*shape, device=device), torch.rand(*shape, device=device) mask = (u +", "principal_curvature(mesh: T_Mesh) -> TS: out = igl.principal_curvature(*mesh) min_dir, max_dir, min_val, max_val = out", "remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) -> T_Mesh: vs, _, _, faces = igl.remove_duplicate_vertices(*mesh, epsilon) return", "in range(3)]).sort() raw_edges = raw_edges[0].cpu().numpy() edges = {(int(edge[0]), int(edge[1])) for edge in raw_edges}", "@igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh) -> T: gc = igl.gaussian_curvature(*mesh) return gc @igl_prepare(torch.float32) def", "+ 1 w = -u - v + 1 uvw = torch.stack([u, v,", "T_Mesh, num_samples: int) -> TS: vs, faces = mesh uvw = 
sample_uvw([faces.shape[0], num_samples],", "vs.squeeze(0) return vs def check_circle_angles(mesh: T_Mesh, center_ind: int, select: T) -> bool: vs,", "get_inside_outside(points: T, mesh: T_Mesh) -> T: device = points.device points = points.numpy() vs,", "mesh uvw = sample_uvw([faces.shape[0], num_samples], vs.device) samples = torch.einsum('fad,fna->fnd', vs[faces], uvw) return samples,", "torch.cat(chosen_faces_inds, dim=0) chosen_faces = faces[chosen_faces_inds] uvw = sample_uvw([num_samples], vs.device) samples = torch.einsum('sf,sfd->sd', uvw,", "torch.cross(vs_faces[:, 1, :] - vs_faces[:, 0, :], vs_faces[:, 2, :] - vs_faces[:, 1,", "vs *= scale * norm ** -1 return vs, faces def scale_from_ref(mesh: T_Mesh,", "= get_faces_normals(mesh) face_areas = torch.norm(face_normals, p=2, dim=1) face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] =", "= torch.arange(3) d_vs = vs_mid[:, None, :] - triangle d_f = triangle[:, select]", "for edge in raw_edges} edges = torch.tensor(list(edges), dtype=torch.int64, device=faces.device) return edges def edge_lengths(mesh:", "(center, scale) def get_edges_ind(mesh: T_Mesh) -> T: vs, faces = mesh raw_edges =", "faces, _ = igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh)", "torch.norm(face_normals, p=2, dim=1) face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] = 1 face_normals = face_normals", "igl.per_vertex_normals(*mesh, weighting) return normals @igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) -> T_Mesh: vs,", "torch.einsum('nd,nad->na', normals, all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3) return is_over def igl_prepare(*dtypes): def decoder(func): def", "triangle d_f = triangle[:, select] - triangle[:, (select + 1) % 3] all_cross", "+ 1, -v[mask] + 1 w = -u - v + 1 uvw", "meshes: face_normals = get_faces_normals(mesh) if not 
face_normals[:, 2].gt(0).all(): return False return True def", "T, weights: T) -> T: vs = mesh[0][mesh[1][faces_inds]] vs = vs * weights[:,", "== 1: fe = fe.unsqueeze(1) if uvw is None: fe_iner = fe[face_ids] else:", "vs[faces], uvw) return samples, uvw class SampleBy(Enum): AREAS = 0 FACES = 1", "= vs.clone() vs -= center[None, :] vs *= scale return vs, faces def", "meshes = meshes[0] return meshes, (center, ratio) def to(tensors, device: D) -> Union[T_Mesh,", "= sample_on_mesh(mesh, num_samples, sample_s=sample_s) if len(features) > 0: samples = [samples] + [get_sampled_fe(fe,", "return True def to_numpy(*tensors: T) -> ARRAYS: params = [param.detach().cpu().numpy() if type(param) is", "- vs_faces[:, 0, :], vs_faces[:, 2, :] - vs_faces[:, 1, :]) return face_normals", "= vs * weights[:, :, None] return vs.sum(1) def sample_uvw(shape, device: D): u,", "-> T: if vs_mid.dim() == 1: vs_mid = vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0) if", "sample_uvw([num_samples], vs.device) samples = torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return samples, chosen_faces_inds, uvw def get_samples(mesh:", "torch.stack([u, v, w], dim=len(shape)) return uvw def get_sampled_fe(fe: T, mesh: T_Mesh, face_ids: T,", "vs, faces = mesh raw_edges = torch.cat([faces[:, [i, (i + 1) % 3]]", "if not in_place: vs = vs.clone() vs -= center[None, :] vs *= scale", "all_vecs = vs[select] - vs[center_ind][None, :] all_vecs = all_vecs / all_vecs.norm(2, 1)[:, None]", "+ 1) % 3]] for i in range(3)]).sort() raw_edges = raw_edges[0].cpu().numpy() edges =", "mesh[0].mean(0) def to_center(vs): max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] center = (max_vals +", "-v[mask] + 1 w = -u - v + 1 uvw = torch.stack([u,", "sample_s == SampleBy.FACES or sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds = [torch.multinomial(weights, num_samples", "triangle = triangle.unsqueeze(0) if normals is None: _, normals = 
compute_face_areas(triangle) select =", "device): return torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result, device): return [torch.from_numpy(r).to(device, dtype=dtype) for r, dtype", "dtype=torch.int64) def normalize(t: T): t = t / t.norm(2, dim=1)[:, None] return t", "vs.device) samples = torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return samples, chosen_faces_inds, uvw def get_samples(mesh: T_Mesh,", "return torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result, device): return [torch.from_numpy(r).to(device, dtype=dtype) for r, dtype in", "t / t.norm(2, dim=1)[:, None] return t def interpolate_vs(mesh: T_Mesh, faces_inds: T, weights:", "fe[face_ids] else: vs_ids = mesh[1][face_ids] fe_unrolled = fe[vs_ids] fe_iner = torch.einsum('sad,sa->sd', fe_unrolled, uvw)", "if len(dtypes) == 1 else to_torch_multi return wrapper def to_torch_singe(result, device): return torch.from_numpy(result).to(device,", "min_val, max_val def get_inside_outside(points: T, mesh: T_Mesh) -> T: device = points.device points", "triangle.unsqueeze(0) if normals is None: _, normals = compute_face_areas(triangle) select = torch.arange(3) d_vs", "<= num_faces: return mesh vs, faces, _ = igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs, faces,", "= [] for tensor in tensors: if type(tensor) is T: out.append(tensor.to(device, )) elif", "T: boundary_indices, boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy() check, uv = igl.lscm(*mesh, boundary_indices, boundary_coordinates) return", "weighted_p = [] if sample_s == SampleBy.AREAS or sample_s == SampleBy.HYB: if face_areas", "weighted_p] if sample_s == SampleBy.HYB: chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0) chosen_faces = faces[chosen_faces_inds] uvw", "face_normals / face_areas_[:, None] face_areas = 0.5 * face_areas return face_areas, face_normals def", "mesh in meshes] vs, faces = meshes[0] max_vals = vs.max(0)[0] min_vals = 
vs.min(0)[0]", "for val in values]) scale = max_val - min_val values = [(val -", "mesh if edges_ind is None: edges_ind = get_edges_ind(mesh) edges = vs[edges_ind] return torch.norm(edges[:,", "recover[:, ind] return alpha device, dtype = vs.device, vs.dtype vs = vs.to(device, dtype=torch.float64)", "T_Mesh, query: T) -> T: query = query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, torch.float32,", "mesh if not in_place: vs = vs.clone() vs -= center[None, :] vs *=", "compute_face_areas(mesh: Union[T_Mesh, T]) -> TS: face_normals = get_faces_normals(mesh) face_areas = torch.norm(face_normals, p=2, dim=1)", "len(meshes_) == 1: meshes_ = meshes_[0] return meshes_, (center, scale) def get_edges_ind(mesh: T_Mesh)", "vs_faces[:, 0, :], vs_faces[:, 2, :] - vs_faces[:, 1, :]) return face_normals def", "if to_squeeze: vs = vs.squeeze(0) return vs def check_circle_angles(mesh: T_Mesh, center_ind: int, select:", "values[0] return values def get_faces_normals(mesh: Union[T_Mesh, T]) -> T: if type(mesh) is not", "vs, faces def to_unit_cube(*meshes: T_Mesh_T, scale=1, in_place: bool = True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T,", "TNS: vs, faces = mesh if faces is None: # sample from pc", "vs_faces[:, 2, :] - vs_faces[:, 1, :]) return face_normals def compute_face_areas(mesh: Union[T_Mesh, T])", "vs, faces = mesh if not in_place: vs = vs.clone() vs = to_center(vs)", "mesh vs -= center[None, :].to(vs.device) vs /= ratio if len(meshes) == 1: meshes", "= vs_mid[:, None, :] - triangle d_f = triangle[:, select] - triangle[:, (select", "if faces is None: # sample from pc uvw = None if vs.shape[0]", "d: float) -> T_Mesh: vs = [[0, 0, 0], [w, 0, 0], [0,", "= triangle.unsqueeze(0) if normals is None: _, normals = compute_face_areas(triangle) select = torch.arange(3)", "faces = mesh uvw = sample_uvw([faces.shape[0], num_samples], vs.device) samples = torch.einsum('fad,fna->fnd', vs[faces], uvw)", "vs = [[0, 0, 0], [w, 0, 0], [0, d, 
0], [w, d,", "- vs_faces[:, 1, :]) return face_normals def compute_face_areas(mesh: Union[T_Mesh, T]) -> TS: face_normals", "scale: float, in_place: bool = True) -> T_Mesh: vs, faces = mesh if", "faces = mesh[0].numpy(), mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers = torch.from_numpy(winding_numbers) inside_outside", "== 1: vs_mid = vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0) if normals is None: _,", "torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals = torch.cross(vs_faces[:, 1, :] -", "/= ratio if len(meshes) == 1: meshes = meshes[0] return meshes, (center, ratio)", "- 2 * np.pi).abs() < EPSILON def vs_over_triangle(vs_mid: T, triangle: T, normals=None) ->", "return to_torch(result, device) to_torch = to_torch_singe if len(dtypes) == 1 else to_torch_multi return", "T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T, float]]: ref = meshes[0] center = ref[0].mean(0)", "= 0.5 * face_areas return face_areas, face_normals def check_sign_area(*meshes: T_Mesh) -> bool: for", "dtype=dtype) for r, dtype in zip(result, dtypes)] return decoder @igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh,", "2 center = (max_vals + min_vals) / 2 meshes_ = [] scale =", "TS]) -> Union[TS, T_Mesh]: out = [] for t in tensors: if type(t)", "weights.dim() == 1 if to_squeeze: weights = weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0) vs =", "scale * norm ** -1 return vs, faces def scale_from_ref(mesh: T_Mesh, center: T,", "[] for t in tensors: if type(t) is T: out.append(t.clone()) else: out.append(clone(*t)) return", "Union[T_Mesh, T], face_ids: T, weights: T) -> T: if type(mesh) is not T:", "pc uvw = None if vs.shape[0] < num_samples: chosen_faces_inds = torch.arange(vs.shape[0]) else: chosen_faces_inds", "vs_mid[:, None, :] - triangle d_f = triangle[:, select] - triangle[:, (select +", "return values[0] return values def 
get_faces_normals(mesh: Union[T_Mesh, T]) -> T: if type(mesh) is", "samples = vs[chosen_faces_inds] else: weighted_p = [] if sample_s == SampleBy.AREAS or sample_s", "face_normals = torch.cross(vs_faces[:, 1, :] - vs_faces[:, 0, :], vs_faces[:, 2, :] -", "1, -v[mask] + 1 w = -u - v + 1 uvw =", "T: device = points.device points = points.numpy() vs, faces = mesh[0].numpy(), mesh[1].numpy() winding_numbers", "= vs.device, vs.dtype vs = vs.to(device, dtype=torch.float64) triangles = triangles.to(device, dtype=torch.float64) areas, _", "if len(features) > 0: samples = [samples] + [get_sampled_fe(fe, mesh, face_ids, uvw) for", "max_dir, min_val, max_val = out return min_dir, max_dir, min_val, max_val def get_inside_outside(points: T,", "edge in raw_edges} edges = torch.tensor(list(edges), dtype=torch.int64, device=faces.device) return edges def edge_lengths(mesh: T_Mesh,", "0] - edges[:, 1], 2, dim=1) # in place def to_unit_edge(*meshes: T_Mesh) ->", "[(mesh, remove_me) if type(mesh) is T else mesh for mesh in meshes] vs,", "T_Mesh, epsilon=1e-7) -> T_Mesh: vs, _, _, faces = igl.remove_duplicate_vertices(*mesh, epsilon) return vs,", "- min_val values = [(val - min_val) / scale for val in values]", "[0, 0, h], [w, 0, h], [0, d, h], [w, d, h]] faces", "gc = igl.gaussian_curvature(*mesh) return gc @igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh, weighting: int = 0)", "[param.detach().cpu().numpy() if type(param) is T else param for param in tensors] return params", "= meshes_[0] return meshes_, (center, scale) def get_edges_ind(mesh: T_Mesh) -> T: vs, faces", "T) -> T: vs = mesh[0][mesh[1][faces_inds]] vs = vs * weights[:, :, None]", "points = points.numpy() vs, faces = mesh[0].numpy(), mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs, faces, points)", "= {(int(edge[0]), int(edge[1])) for edge in raw_edges} edges = torch.tensor(list(edges), dtype=torch.int64, device=faces.device) return", "vs = vs * weights[:, :, None] return 
vs.sum(1) def sample_uvw(shape, device: D):", ")) elif type(tensor) is tuple or type(tensors) is List: out.append(to(list(tensor), device)) else: out.append(tensor)", "-> TS: face_normals = get_faces_normals(mesh) face_areas = torch.norm(face_normals, p=2, dim=1) face_areas_ = face_areas.clone()", "= torch.norm(face_normals, p=2, dim=1) face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] = 1 face_normals =", "t in tensors: if type(t) is T: out.append(t.clone()) else: out.append(clone(*t)) return out def", "chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds] else: weighted_p = [] if sample_s ==", "center = (max_vals + min_vals) / 2 meshes_ = [] scale = float(scale", "= mesh[1][face_ids] fe_unrolled = fe[vs_ids] fe_iner = torch.einsum('sad,sa->sd', fe_unrolled, uvw) # if to_squeeze:", "= vs.to(device, dtype=torch.float64) triangles = triangles.to(device, dtype=torch.float64) areas, _ = compute_face_areas(triangles) recover =", "vs = to_center(vs) norm = vs.norm(2, dim=1).max() vs *= scale * norm **", "faces_inds: T, weights: T) -> T: vs = mesh[0][mesh[1][faces_inds]] vs = vs *", "fe in features] return samples, face_ids, uvw def find_barycentric(vs: T, triangles: T) ->", "normalize(t: T): t = t / t.norm(2, dim=1)[:, None] return t def interpolate_vs(mesh:", "dtype=dtype) def from_barycentric(mesh: Union[T_Mesh, T], face_ids: T, weights: T) -> T: if type(mesh)", "d, h]] faces = [[0, 2, 1], [1, 2, 3], [4, 5, 6],", "T, triangles: T) -> T: def compute_barycentric(ind): triangles[:, ind] = vs alpha =", "h: float, d: float) -> T_Mesh: vs = [[0, 0, 0], [w, 0,", "* from constants import EPSILON import igl def scale_all(*values: T): max_val = max([val.max().item()", "in range(3)] barycentric = torch.stack(barycentric, dim=1) # assert barycentric.sum(1).max().item() <= 1 + EPSILON", "d_f = triangle[:, select] - triangle[:, (select + 1) % 3] all_cross =", "+ EPSILON return barycentric.to(device, 
dtype=dtype) def from_barycentric(mesh: Union[T_Mesh, T], face_ids: T, weights: T)", "= mesh if faces is None: # sample from pc uvw = None", "v[mask] = -u[mask] + 1, -v[mask] + 1 w = -u - v", "- vs[center_ind][None, :] all_vecs = all_vecs / all_vecs.norm(2, 1)[:, None] all_vecs = torch.cat([all_vecs,", "dim=1) face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] = 1 face_normals = face_normals / face_areas_[:,", "= vs.clone() vs = to_center(vs) norm = vs.norm(2, dim=1).max() vs *= scale *", "[0, d, 0], [w, d, 0], [0, 0, h], [w, 0, h], [0,", "torch.einsum('nad,na->nd', triangles[face_ids], weights) if to_squeeze: vs = vs.squeeze(0) return vs def check_circle_angles(mesh: T_Mesh,", "T: normals = igl.per_vertex_normals(*mesh, weighting) return normals @igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7)", "[5, 7, 6], [0, 1, 5], [0, 5, 4], [2, 6, 7], [3,", "to_squeeze: vs = vs.squeeze(0) return vs def check_circle_angles(mesh: T_Mesh, center_ind: int, select: T)", "dtype=torch.float64) areas, _ = compute_face_areas(triangles) recover = triangles.clone() barycentric = [compute_barycentric(i) for i", "vs, faces = mesh vs_faces = vs[faces] else: vs_faces = mesh if vs_faces.shape[-1]", "@igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh, num_faces: int): if mesh[1].shape[0] <= num_faces: return mesh vs,", "-> T: if type(mesh) is not T: vs, faces = mesh vs_faces =", "if type(param) is T else param for param in tensors] return params def", "= torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds] else: weighted_p = [] if sample_s == SampleBy.AREAS", "triangles: T) -> T: def compute_barycentric(ind): triangles[:, ind] = vs alpha = compute_face_areas(triangles)[0]", "T_Mesh) -> T: gc = igl.gaussian_curvature(*mesh) return gc @igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh, weighting:", "len(tensors) == 1: return out[0] else: return tuple(out) def 
clone(*tensors: Union[T, TS]) ->", "torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result, device): return [torch.from_numpy(r).to(device, dtype=dtype) for r, dtype in zip(result,", "raw_edges[0].cpu().numpy() edges = {(int(edge[0]), int(edge[1])) for edge in raw_edges} edges = torch.tensor(list(edges), dtype=torch.int64,", "from custom_types import * from constants import EPSILON import igl def scale_all(*values: T):", "[2, 6, 7], [3, 2, 7], [1, 3, 5], [3, 7, 5], [0,", "triangles[:, ind] = recover[:, ind] return alpha device, dtype = vs.device, vs.dtype vs", "if vs.shape[0] < num_samples: chosen_faces_inds = torch.arange(vs.shape[0]) else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples =", "uvw, vs[chosen_faces]) return samples, chosen_faces_inds, uvw def get_samples(mesh: T_Mesh, num_samples: int, sample_s: SampleBy,", "is remove_me else (vs_, faces_)) if len(meshes_) == 1: meshes_ = meshes_[0] return", "mesh[0][mesh[1][faces_inds]] vs = vs * weights[:, :, None] return vs.sum(1) def sample_uvw(shape, device:", "for fe in features] return samples, face_ids, uvw def find_barycentric(vs: T, triangles: T)", "mesh vs, faces, _ = igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32) def", "4], [2, 6, 7], [3, 2, 7], [1, 3, 5], [3, 7, 5],", "face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] = 1 face_normals = face_normals / face_areas_[:, None] face_areas =", "face_normals def compute_face_areas(mesh: Union[T_Mesh, T]) -> TS: face_normals = get_faces_normals(mesh) face_areas = torch.norm(face_normals,", "vs, _, _, faces = igl.remove_duplicate_vertices(*mesh, epsilon) return vs, faces @igl_prepare(torch.float32) def winding_number_igl(mesh:", "torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def normalize(t: T): t = t / t.norm(2, dim=1)[:,", "vs[chosen_faces_inds] else: weighted_p = [] if sample_s == SampleBy.AREAS or 
sample_s == SampleBy.HYB:", "num_samples: int) -> TS: vs, faces = mesh uvw = sample_uvw([faces.shape[0], num_samples], vs.device)", "def to_unit_sphere(mesh: T_Mesh, in_place: bool = True, scale=1.) -> T_Mesh: vs, faces =", "ARRAYS: params = [param.detach().cpu().numpy() if type(param) is T else param for param in", "0) -> T: normals = igl.per_vertex_normals(*mesh, weighting) return normals @igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh:", "edges_ind = get_edges_ind(mesh) edges = vs[edges_ind] return torch.norm(edges[:, 0] - edges[:, 1], 2,", "= True, scale=1.) -> T_Mesh: vs, faces = mesh if not in_place: vs", "edges def edge_lengths(mesh: T_Mesh, edges_ind: TN = None) -> T: vs, faces =", "igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh) -> T: gc", "faces = igl.remove_duplicate_vertices(*mesh, epsilon) return vs, faces @igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh, query: T)", "== SampleBy.HYB: if face_areas is None: face_areas, _ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0", "1 HYB = 2 def sample_on_mesh(mesh: T_Mesh, num_samples: int, face_areas: TN = None,", "mesh[0].device, mesh[0].dtype vs, faces = to_numpy(*mesh) result = func((vs, faces), *args[1:], **kwargs) return", "return uv def interpulate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs =", "_ = compute_face_areas(triangles) recover = triangles.clone() barycentric = [compute_barycentric(i) for i in range(3)]", "= mesh vs -= center[None, :].to(vs.device) vs /= ratio if len(meshes) == 1:", "bool: vs, _ = mesh all_vecs = vs[select] - vs[center_ind][None, :] all_vecs =", "fe = fe.unsqueeze(1) if uvw is None: fe_iner = fe[face_ids] else: vs_ids =", "t = t / t.norm(2, dim=1)[:, None] return t def interpolate_vs(mesh: T_Mesh, faces_inds:", "EPSILON import igl def scale_all(*values: T): max_val = max([val.max().item() for val in values])", 
"for i in range(3)]).sort() raw_edges = raw_edges[0].cpu().numpy() edges = {(int(edge[0]), int(edge[1])) for edge", "from pc uvw = None if vs.shape[0] < num_samples: chosen_faces_inds = torch.arange(vs.shape[0]) else:", "None: edges_ind = get_edges_ind(mesh) edges = vs[edges_ind] return torch.norm(edges[:, 0] - edges[:, 1],", "num_samples: int, sample_s: SampleBy, *features: T) -> Union[T, TS]: samples, face_ids, uvw =", "all_vecs = all_vecs / all_vecs.norm(2, 1)[:, None] all_vecs = torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos", "mesh to_squeeze = weights.dim() == 1 if to_squeeze: weights = weights.unsqueeze(0) face_ids =", "return samples, uvw class SampleBy(Enum): AREAS = 0 FACES = 1 HYB =", "2, dim=1) # in place def to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T,", "HYB = 2 def sample_on_mesh(mesh: T_Mesh, num_samples: int, face_areas: TN = None, sample_s:", "chosen_faces_inds, uvw def get_samples(mesh: T_Mesh, num_samples: int, sample_s: SampleBy, *features: T) -> Union[T,", "to_torch_multi(result, device): return [torch.from_numpy(r).to(device, dtype=dtype) for r, dtype in zip(result, dtypes)] return decoder", "Union[T_Mesh, T]) -> TS: face_normals = get_faces_normals(mesh) face_areas = torch.norm(face_normals, p=2, dim=1) face_areas_", "TS: out = igl.principal_curvature(*mesh) min_dir, max_dir, min_val, max_val = out return min_dir, max_dir,", "mesh in meshes: face_normals = get_faces_normals(mesh) if not face_normals[:, 2].gt(0).all(): return False return", "chosen_faces = faces[chosen_faces_inds] uvw = sample_uvw([num_samples], vs.device) samples = torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return", "def from_barycentric(mesh: Union[T_Mesh, T], face_ids: T, weights: T) -> T: if type(mesh) is", "def create_mapper(mask: T) -> T: mapper = torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) - 1 mapper[mask]", "vs.max(0)[0] min_vals = vs.min(0)[0] max_range = (max_vals - min_vals).max() / 2 center =", 
"def normalize(t: T): t = t / t.norm(2, dim=1)[:, None] return t def", "range(3)] barycentric = torch.stack(barycentric, dim=1) # assert barycentric.sum(1).max().item() <= 1 + EPSILON return", "T: triangles: T = mesh[0][mesh[1]] else: triangles: T = mesh to_squeeze = weights.dim()", "T, scale: float, in_place: bool = True) -> T_Mesh: vs, faces = mesh", "is_over = all_dots.ge(0).long().sum(1).eq(3) return is_over def igl_prepare(*dtypes): def decoder(func): def wrapper(*args, **kwargs): mesh", "vs, faces = mesh if not in_place: vs = vs.clone() vs -= center[None,", "return vs, faces def scale_from_ref(mesh: T_Mesh, center: T, scale: float, in_place: bool =", "to_numpy(*mesh) result = func((vs, faces), *args[1:], **kwargs) return to_torch(result, device) to_torch = to_torch_singe", "if uvw is None: fe_iner = fe[face_ids] else: vs_ids = mesh[1][face_ids] fe_unrolled =", "samples, face_ids, uvw def find_barycentric(vs: T, triangles: T) -> T: def compute_barycentric(ind): triangles[:,", "torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) - 1 mapper[mask] = torch.arange(mask.sum().item(), device=mask.device) return mapper def mesh_center(mesh:", "is tuple or type(tensors) is List: out.append(to(list(tensor), device)) else: out.append(tensor) if len(tensors) ==", "= weights.dim() == 1 if to_squeeze: weights = weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0) vs", "all_vecs = torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos = torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles = torch.acos_(all_cos)", "v, w], dim=len(shape)) return uvw def get_sampled_fe(fe: T, mesh: T_Mesh, face_ids: T, uvw:", "5], [0, 5, 4], [2, 6, 7], [3, 2, 7], [1, 3, 5],", "if sample_s == SampleBy.FACES or sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds = [torch.multinomial(weights,", "T_Mesh, in_place: bool = True, scale=1.) 
-> T_Mesh: vs, faces = mesh if", "epsilon) return vs, faces @igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh, query: T) -> T: query", "7, 5], [0, 4, 2], [2, 4, 6]] return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64)", "+ 1 uvw = torch.stack([u, v, w], dim=len(shape)) return uvw def get_sampled_fe(fe: T,", "= func((vs, faces), *args[1:], **kwargs) return to_torch(result, device) to_torch = to_torch_singe if len(dtypes)", "face_normals = get_faces_normals(mesh) face_areas = torch.norm(face_normals, p=2, dim=1) face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_, 0)]", "**kwargs): mesh = args[0] device, dtype = mesh[0].device, mesh[0].dtype vs, faces = to_numpy(*mesh)", "vs[edges_ind] return torch.norm(edges[:, 0] - edges[:, 1], 2, dim=1) # in place def", "param for param in tensors] return params def create_mapper(mask: T) -> T: mapper", "val in values]) min_val = min([val.min().item() for val in values]) scale = max_val", "mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers = torch.from_numpy(winding_numbers) inside_outside = winding_numbers.lt(.5).float() *", "out.append(t.clone()) else: out.append(clone(*t)) return out def get_box(w: float, h: float, d: float) ->", "= 2 def sample_on_mesh(mesh: T_Mesh, num_samples: int, face_areas: TN = None, sample_s: SampleBy", "return decoder @igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh, num_faces: int): if mesh[1].shape[0] <= num_faces: return", "mesh if faces is None: # sample from pc uvw = None if", "1: fe = fe.unsqueeze(1) if uvw is None: fe_iner = fe[face_ids] else: vs_ids", "mesh: T_Mesh) -> T: device = points.device points = points.numpy() vs, faces =", "= mesh if not in_place: vs = vs.clone() vs = to_center(vs) norm =", "return [torch.from_numpy(r).to(device, dtype=dtype) for r, dtype in zip(result, dtypes)] return decoder @igl_prepare(torch.float32, torch.int64)", "max_vals = vs.max(0)[0] 
min_vals = vs.min(0)[0] center = (max_vals + min_vals) / 2", "_ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas / face_areas.sum()) if sample_s == SampleBy.FACES", "weights in weighted_p] if sample_s == SampleBy.HYB: chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0) chosen_faces =", "raw_edges} edges = torch.tensor(list(edges), dtype=torch.int64, device=faces.device) return edges def edge_lengths(mesh: T_Mesh, edges_ind: TN", "[w, d, h]] faces = [[0, 2, 1], [1, 2, 3], [4, 5,", "t def interpolate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs = mesh[0][mesh[1][faces_inds]]", "interpulate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs = mesh[0][mesh[1][faces_inds]] vs =", "sample_s: SampleBy, *features: T) -> Union[T, TS]: samples, face_ids, uvw = sample_on_mesh(mesh, num_samples,", "def edge_lengths(mesh: T_Mesh, edges_ind: TN = None) -> T: vs, faces = mesh", "meshes[0] max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] max_range = (max_vals - min_vals).max() /", "0 weighted_p.append(face_areas / face_areas.sum()) if sample_s == SampleBy.FACES or sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0],", "chosen_faces_inds = [torch.multinomial(weights, num_samples // len(weighted_p), replacement=True) for weights in weighted_p] if sample_s", "is None: fe_iner = fe[face_ids] else: vs_ids = mesh[1][face_ids] fe_unrolled = fe[vs_ids] fe_iner", "T_Mesh): return mesh[0].mean(0) def to_center(vs): max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] center =", "return tuple(out) def clone(*tensors: Union[T, TS]) -> Union[TS, T_Mesh]: out = [] for", "SampleBy.AREAS or sample_s == SampleBy.HYB: if face_areas is None: face_areas, _ = compute_face_areas(mesh)", "vs_faces[:, 1, :]) return face_normals def compute_face_areas(mesh: Union[T_Mesh, T]) -> TS: face_normals =", "0], [0, 0, h], [w, 0, h], [0, d, h], [w, d, h]]", "device): return [torch.from_numpy(r).to(device, dtype=dtype) for r, dtype in 
zip(result, dtypes)] return decoder @igl_prepare(torch.float32,", "igl.lscm(*mesh, boundary_indices, boundary_coordinates) return uv def interpulate_vs(mesh: T_Mesh, faces_inds: T, weights: T) ->", "r, dtype in zip(result, dtypes)] return decoder @igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh, num_faces: int):", "-> Union[TS, T_Mesh]: out = [] for t in tensors: if type(t) is", "scale_all(*values: T): max_val = max([val.max().item() for val in values]) min_val = min([val.min().item() for", "** -1 return vs, faces def scale_from_ref(mesh: T_Mesh, center: T, scale: float, in_place:", "*= scale return vs, faces def to_unit_cube(*meshes: T_Mesh_T, scale=1, in_place: bool = True)", "torch.rand(*shape, device=device) mask = (u + v).gt(1) u[mask], v[mask] = -u[mask] + 1,", "= mesh all_vecs = vs[select] - vs[center_ind][None, :] all_vecs = all_vecs / all_vecs.norm(2,", "boundary_coordinates) return uv def interpulate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs", "to_unit_sphere(mesh: T_Mesh, in_place: bool = True, scale=1.) 
-> T_Mesh: vs, faces = mesh", "= edge_lengths(ref).mean().item() for mesh in meshes: vs, _ = mesh vs -= center[None,", "< EPSILON def vs_over_triangle(vs_mid: T, triangle: T, normals=None) -> T: if vs_mid.dim() ==", "def scale_all(*values: T): max_val = max([val.max().item() for val in values]) min_val = min([val.min().item()", "all_vecs[:1]], dim=0) all_cos = torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles = torch.acos_(all_cos) all_angles = all_angles.sum()", "-> T: query = query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32) def", "min_vals) / 2 meshes_ = [] scale = float(scale / max_range) for mesh", "vs /= ratio if len(meshes) == 1: meshes = meshes[0] return meshes, (center,", "h], [w, d, h]] faces = [[0, 2, 1], [1, 2, 3], [4,", "def decoder(func): def wrapper(*args, **kwargs): mesh = args[0] device, dtype = mesh[0].device, mesh[0].dtype", "assert barycentric.sum(1).max().item() <= 1 + EPSILON return barycentric.to(device, dtype=dtype) def from_barycentric(mesh: Union[T_Mesh, T],", "= compute_face_areas(triangles) recover = triangles.clone() barycentric = [compute_barycentric(i) for i in range(3)] barycentric", "scale) meshes_.append(vs_ if faces_ is remove_me else (vs_, faces_)) if len(meshes_) == 1:", "def igl_prepare(*dtypes): def decoder(func): def wrapper(*args, **kwargs): mesh = args[0] device, dtype =", "center, scale) meshes_.append(vs_ if faces_ is remove_me else (vs_, faces_)) if len(meshes_) ==", "edges[:, 1], 2, dim=1) # in place def to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh,", "w], dim=len(shape)) return uvw def get_sampled_fe(fe: T, mesh: T_Mesh, face_ids: T, uvw: TN)", "barycentric = [compute_barycentric(i) for i in range(3)] barycentric = torch.stack(barycentric, dim=1) # assert", "sample_on_faces(mesh: T_Mesh, num_samples: int) -> TS: vs, faces = mesh uvw = sample_uvw([faces.shape[0],", "1: vs_mid = 
vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0) if normals is None: _, normals", "torch.tensor(list(edges), dtype=torch.int64, device=faces.device) return edges def edge_lengths(mesh: T_Mesh, edges_ind: TN = None) ->", "ratio) def to(tensors, device: D) -> Union[T_Mesh, TS, T]: out = [] for", "args[0] device, dtype = mesh[0].device, mesh[0].dtype vs, faces = to_numpy(*mesh) result = func((vs,", "fe_iner = fe[face_ids] else: vs_ids = mesh[1][face_ids] fe_unrolled = fe[vs_ids] fe_iner = torch.einsum('sad,sa->sd',", "= face_normals / face_areas_[:, None] face_areas = 0.5 * face_areas return face_areas, face_normals", "device: D) -> Union[T_Mesh, TS, T]: out = [] for tensor in tensors:", "float(scale / max_range) for mesh in meshes: vs_, faces_ = scale_from_ref(mesh, center, scale)", "T) -> bool: vs, _ = mesh all_vecs = vs[select] - vs[center_ind][None, :]", "return face_normals def compute_face_areas(mesh: Union[T_Mesh, T]) -> TS: face_normals = get_faces_normals(mesh) face_areas =", "vs_ids = mesh[1][face_ids] fe_unrolled = fe[vs_ids] fe_iner = torch.einsum('sad,sa->sd', fe_unrolled, uvw) # if", "+ v).gt(1) u[mask], v[mask] = -u[mask] + 1, -v[mask] + 1 w =", "mesh[0].numpy(), mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers = torch.from_numpy(winding_numbers) inside_outside = winding_numbers.lt(.5).float()", "igl.principal_curvature(*mesh) min_dir, max_dir, min_val, max_val = out return min_dir, max_dir, min_val, max_val def", "5], [3, 7, 5], [0, 4, 2], [2, 4, 6]] return torch.tensor(vs, dtype=torch.float32),", "vs, faces = mesh if edges_ind is None: edges_ind = get_edges_ind(mesh) edges =", "ind] = recover[:, ind] return alpha device, dtype = vs.device, vs.dtype vs =", "epsilon=1e-7) -> T_Mesh: vs, _, _, faces = igl.remove_duplicate_vertices(*mesh, epsilon) return vs, faces", "= torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return samples, chosen_faces_inds, uvw def get_samples(mesh: T_Mesh, 
num_samples: int,", "None, sample_s: SampleBy = SampleBy.HYB) -> TNS: vs, faces = mesh if faces", "T: out.append(tensor.to(device, )) elif type(tensor) is tuple or type(tensors) is List: out.append(to(list(tensor), device))", "None] return t def interpolate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs", "(vs_, faces_)) if len(meshes_) == 1: meshes_ = meshes_[0] return meshes_, (center, scale)", "device=device), torch.rand(*shape, device=device) mask = (u + v).gt(1) u[mask], v[mask] = -u[mask] +", "device=mesh[0].device)) chosen_faces_inds = [torch.multinomial(weights, num_samples // len(weighted_p), replacement=True) for weights in weighted_p] if", "-> T: vs = mesh[0][mesh[1][faces_inds]] vs = vs * weights[:, :, None] return", "T, mesh: T_Mesh) -> T: device = points.device points = points.numpy() vs, faces", "in raw_edges} edges = torch.tensor(list(edges), dtype=torch.int64, device=faces.device) return edges def edge_lengths(mesh: T_Mesh, edges_ind:", "torch.norm(edges[:, 0] - edges[:, 1], 2, dim=1) # in place def to_unit_edge(*meshes: T_Mesh)", "normals @igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) -> T_Mesh: vs, _, _, faces", "= winding_numbers.lt(.5).float() * 2 - 1 return inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh: T_Mesh, boundary_indices:", "= faces[chosen_faces_inds] uvw = sample_uvw([num_samples], vs.device) samples = torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return samples,", "uv def interpulate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs = mesh[0][mesh[1][faces_inds]]", "center[None, :] return vs def to_unit_sphere(mesh: T_Mesh, in_place: bool = True, scale=1.) 
->", "-> bool: for mesh in meshes: face_normals = get_faces_normals(mesh) if not face_normals[:, 2].gt(0).all():", "vs * weights[:, :, None] return vs.sum(1) def sample_uvw(shape, device: D): u, v", "num_faces: return mesh vs, faces, _ = igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs, faces, num_faces)[1:3]", "vs[chosen_faces]) return samples, chosen_faces_inds, uvw def get_samples(mesh: T_Mesh, num_samples: int, sample_s: SampleBy, *features:", "face_areas = 0.5 * face_areas return face_areas, face_normals def check_sign_area(*meshes: T_Mesh) -> bool:", "(max_vals + min_vals) / 2 vs -= center[None, :] return vs def to_unit_sphere(mesh:", "None if vs.shape[0] < num_samples: chosen_faces_inds = torch.arange(vs.shape[0]) else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples", "def lscm(mesh: T_Mesh, boundary_indices: T, boundary_coordinates: T) -> T: boundary_indices, boundary_coordinates = boundary_indices.numpy(),", "min_vals = vs.min(0)[0] center = (max_vals + min_vals) / 2 vs -= center[None,", "all_angles = all_angles.sum() return (all_angles - 2 * np.pi).abs() < EPSILON def vs_over_triangle(vs_mid:", "samples = [samples] + [get_sampled_fe(fe, mesh, face_ids, uvw) for fe in features] return", "uvw = torch.stack([u, v, w], dim=len(shape)) return uvw def get_sampled_fe(fe: T, mesh: T_Mesh,", "meshes] vs, faces = meshes[0] max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] max_range =", "in_place: vs = vs.clone() vs = to_center(vs) norm = vs.norm(2, dim=1).max() vs *=", "out = [] for t in tensors: if type(t) is T: out.append(t.clone()) else:", "/ face_areas_[:, None] face_areas = 0.5 * face_areas return face_areas, face_normals def check_sign_area(*meshes:", "return igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh) -> T: gc = igl.gaussian_curvature(*mesh)", "is not T: vs, faces = mesh vs_faces = vs[faces] else: vs_faces =", "/ 2 vs -= center[None, :] return vs def 
to_unit_sphere(mesh: T_Mesh, in_place: bool", "device=faces.device) return edges def edge_lengths(mesh: T_Mesh, edges_ind: TN = None) -> T: vs,", "[[0, 0, 0], [w, 0, 0], [0, d, 0], [w, d, 0], [0,", "-> T: # to_squeeze = if fe.dim() == 1: fe = fe.unsqueeze(1) if", "bool = True) -> T_Mesh: vs, faces = mesh if not in_place: vs", "dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals = torch.cross(vs_faces[:, 1, :] - vs_faces[:, 0, :], vs_faces[:,", "param in tensors] return params def create_mapper(mask: T) -> T: mapper = torch.zeros(mask.shape[0],", "scale=1.) -> T_Mesh: vs, faces = mesh if not in_place: vs = vs.clone()", "_, faces = igl.remove_duplicate_vertices(*mesh, epsilon) return vs, faces @igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh, query:", "get_faces_normals(mesh) face_areas = torch.norm(face_normals, p=2, dim=1) face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] = 1", "return is_over def igl_prepare(*dtypes): def decoder(func): def wrapper(*args, **kwargs): mesh = args[0] device,", "vs.min(0)[0] center = (max_vals + min_vals) / 2 vs -= center[None, :] return", "torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos = torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles = torch.acos_(all_cos) all_angles =", "triangles.clone() barycentric = [compute_barycentric(i) for i in range(3)] barycentric = torch.stack(barycentric, dim=1) #", "= igl.remove_duplicate_vertices(*mesh, epsilon) return vs, faces @igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh, query: T) ->", "int(edge[1])) for edge in raw_edges} edges = torch.tensor(list(edges), dtype=torch.int64, device=faces.device) return edges def", "edges_ind is None: edges_ind = get_edges_ind(mesh) edges = vs[edges_ind] return torch.norm(edges[:, 0] -", "triangle[:, (select + 1) % 3] all_cross = torch.cross(d_vs, d_f, dim=2) all_dots =", "mesh if not in_place: vs = vs.clone() vs = to_center(vs) norm = vs.norm(2,", "ind] = vs alpha = 
compute_face_areas(triangles)[0] / areas triangles[:, ind] = recover[:, ind]", "return min_dir, max_dir, min_val, max_val def get_inside_outside(points: T, mesh: T_Mesh) -> T: device", "-= center[None, :].to(vs.device) vs /= ratio if len(meshes) == 1: meshes = meshes[0]", "vs[center_ind][None, :] all_vecs = all_vecs / all_vecs.norm(2, 1)[:, None] all_vecs = torch.cat([all_vecs, all_vecs[:1]],", "inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh: T_Mesh, boundary_indices: T, boundary_coordinates: T) -> T: boundary_indices, boundary_coordinates", "select: T) -> bool: vs, _ = mesh all_vecs = vs[select] - vs[center_ind][None,", "face_ids: T, uvw: TN) -> T: # to_squeeze = if fe.dim() == 1:", "max_dir, min_val, max_val def get_inside_outside(points: T, mesh: T_Mesh) -> T: device = points.device", "List: out.append(to(list(tensor), device)) else: out.append(tensor) if len(tensors) == 1: return out[0] else: return", "torch.arange(3) d_vs = vs_mid[:, None, :] - triangle d_f = triangle[:, select] -", "1, :]) return face_normals def compute_face_areas(mesh: Union[T_Mesh, T]) -> TS: face_normals = get_faces_normals(mesh)", "in zip(result, dtypes)] return decoder @igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh, num_faces: int): if mesh[1].shape[0]", "uvw) # if to_squeeze: # fe_iner = fe_iner.squeeze_(1) return fe_iner def sample_on_faces(mesh: T_Mesh,", "= all_vecs / all_vecs.norm(2, 1)[:, None] all_vecs = torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos =", "None] return vs.sum(1) def sample_uvw(shape, device: D): u, v = torch.rand(*shape, device=device), torch.rand(*shape,", "7, 6], [0, 1, 5], [0, 5, 4], [2, 6, 7], [3, 2,", "T_Mesh) -> bool: for mesh in meshes: face_normals = get_faces_normals(mesh) if not face_normals[:,", "def winding_number_igl(mesh: T_Mesh, query: T) -> T: query = query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query)", "center: T, scale: float, in_place: bool = True) -> T_Mesh: vs, faces =", 
"fe[vs_ids] fe_iner = torch.einsum('sad,sa->sd', fe_unrolled, uvw) # if to_squeeze: # fe_iner = fe_iner.squeeze_(1)", "Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T, float]]: remove_me = 0 meshes = [(mesh, remove_me) if", "vs_mid = vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0) if normals is None: _, normals =", "= vs[faces] else: vs_faces = mesh if vs_faces.shape[-1] == 2: vs_faces = torch.cat(", "if faces_ is remove_me else (vs_, faces_)) if len(meshes_) == 1: meshes_ =", "if not face_normals[:, 2].gt(0).all(): return False return True def to_numpy(*tensors: T) -> ARRAYS:", "center[None, :].to(vs.device) vs /= ratio if len(meshes) == 1: meshes = meshes[0] return", "out.append(tensor.to(device, )) elif type(tensor) is tuple or type(tensors) is List: out.append(to(list(tensor), device)) else:", "...]], Tuple[T, float]]: ref = meshes[0] center = ref[0].mean(0) ratio = edge_lengths(ref).mean().item() for", "== 1: return values[0] return values def get_faces_normals(mesh: Union[T_Mesh, T]) -> T: if", "for mesh in meshes] vs, faces = meshes[0] max_vals = vs.max(0)[0] min_vals =", "= igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers = torch.from_numpy(winding_numbers) inside_outside = winding_numbers.lt(.5).float() * 2 -", "vs_faces = vs[faces] else: vs_faces = mesh if vs_faces.shape[-1] == 2: vs_faces =", "-> T_Mesh: vs, _, _, faces = igl.remove_duplicate_vertices(*mesh, epsilon) return vs, faces @igl_prepare(torch.float32)", "- 1 mapper[mask] = torch.arange(mask.sum().item(), device=mask.device) return mapper def mesh_center(mesh: T_Mesh): return mesh[0].mean(0)", "T_Mesh) -> T: vs, faces = mesh raw_edges = torch.cat([faces[:, [i, (i +", "query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32) def principal_curvature(mesh: T_Mesh) -> TS:", "= vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0) if normals is None: _, normals = 
compute_face_areas(triangle)", "-> T: device = points.device points = points.numpy() vs, faces = mesh[0].numpy(), mesh[1].numpy()", "= to_center(vs) norm = vs.norm(2, dim=1).max() vs *= scale * norm ** -1", "alpha = compute_face_areas(triangles)[0] / areas triangles[:, ind] = recover[:, ind] return alpha device,", "vs, faces = to_numpy(*mesh) result = func((vs, faces), *args[1:], **kwargs) return to_torch(result, device)", "= igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh) -> T:", "= vs.max(0)[0] min_vals = vs.min(0)[0] max_range = (max_vals - min_vals).max() / 2 center", "[compute_barycentric(i) for i in range(3)] barycentric = torch.stack(barycentric, dim=1) # assert barycentric.sum(1).max().item() <=", "in values]) scale = max_val - min_val values = [(val - min_val) /", "min_val, max_val = out return min_dir, max_dir, min_val, max_val def get_inside_outside(points: T, mesh:", "face_areas.sum()) if sample_s == SampleBy.FACES or sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds =", "min_val values = [(val - min_val) / scale for val in values] if", "for i in range(3)] barycentric = torch.stack(barycentric, dim=1) # assert barycentric.sum(1).max().item() <= 1", "D) -> Union[T_Mesh, TS, T]: out = [] for tensor in tensors: if", "igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32) def principal_curvature(mesh: T_Mesh) -> TS: out =", "def scale_from_ref(mesh: T_Mesh, center: T, scale: float, in_place: bool = True) -> T_Mesh:", "[w, d, 0], [0, 0, h], [w, 0, h], [0, d, h], [w,", "- v + 1 uvw = torch.stack([u, v, w], dim=len(shape)) return uvw def", "alpha device, dtype = vs.device, vs.dtype vs = vs.to(device, dtype=torch.float64) triangles = triangles.to(device,", "mesh[0].dtype vs, faces = to_numpy(*mesh) result = func((vs, faces), 
*args[1:], **kwargs) return to_torch(result,", "scale = max_val - min_val values = [(val - min_val) / scale for", "if type(tensor) is T: out.append(tensor.to(device, )) elif type(tensor) is tuple or type(tensors) is", "/ t.norm(2, dim=1)[:, None] return t def interpolate_vs(mesh: T_Mesh, faces_inds: T, weights: T)", "compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas / face_areas.sum()) if sample_s == SampleBy.FACES or sample_s", "face_normals[:, 2].gt(0).all(): return False return True def to_numpy(*tensors: T) -> ARRAYS: params =", "[0, 1, 5], [0, 5, 4], [2, 6, 7], [3, 2, 7], [1,", "points.device points = points.numpy() vs, faces = mesh[0].numpy(), mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs, faces,", "def get_sampled_fe(fe: T, mesh: T_Mesh, face_ids: T, uvw: TN) -> T: # to_squeeze", "p=2, dim=1) face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] = 1 face_normals = face_normals /", ":] all_vecs = all_vecs / all_vecs.norm(2, 1)[:, None] all_vecs = torch.cat([all_vecs, all_vecs[:1]], dim=0)", "dtypes)] return decoder @igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh, num_faces: int): if mesh[1].shape[0] <= num_faces:", "vs.clone() vs -= center[None, :] vs *= scale return vs, faces def to_unit_cube(*meshes:", "out return min_dir, max_dir, min_val, max_val def get_inside_outside(points: T, mesh: T_Mesh) -> T:", "out[0] else: return tuple(out) def clone(*tensors: Union[T, TS]) -> Union[TS, T_Mesh]: out =", "else to_torch_multi return wrapper def to_torch_singe(result, device): return torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result, device):", "bool = True, scale=1.) 
-> T_Mesh: vs, faces = mesh if not in_place:", "[3, 2, 7], [1, 3, 5], [3, 7, 5], [0, 4, 2], [2,", "T, normals=None) -> T: if vs_mid.dim() == 1: vs_mid = vs_mid.unsqueeze(0) triangle =", "vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0) if normals is None: _, normals = compute_face_areas(triangle) select", "def check_circle_angles(mesh: T_Mesh, center_ind: int, select: T) -> bool: vs, _ = mesh", "normals = compute_face_areas(triangle) select = torch.arange(3) d_vs = vs_mid[:, None, :] - triangle", "to_torch_multi return wrapper def to_torch_singe(result, device): return torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result, device): return", "ratio = edge_lengths(ref).mean().item() for mesh in meshes: vs, _ = mesh vs -=", "to_squeeze = if fe.dim() == 1: fe = fe.unsqueeze(1) if uvw is None:", "= vs.min(0)[0] max_range = (max_vals - min_vals).max() / 2 center = (max_vals +", "SampleBy.FACES or sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds = [torch.multinomial(weights, num_samples // len(weighted_p),", "tensors: if type(tensor) is T: out.append(tensor.to(device, )) elif type(tensor) is tuple or type(tensors)", "4, 6]] return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def normalize(t: T): t = t", "= torch.cat(chosen_faces_inds, dim=0) chosen_faces = faces[chosen_faces_inds] uvw = sample_uvw([num_samples], vs.device) samples = torch.einsum('sf,sfd->sd',", "T_Mesh: vs = [[0, 0, 0], [w, 0, 0], [0, d, 0], [w,", "0, :], vs_faces[:, 2, :] - vs_faces[:, 1, :]) return face_normals def compute_face_areas(mesh:", "+ [get_sampled_fe(fe, mesh, face_ids, uvw) for fe in features] return samples, face_ids, uvw", "= to_torch_singe if len(dtypes) == 1 else to_torch_multi return wrapper def to_torch_singe(result, device):", "= torch.stack(barycentric, dim=1) # assert barycentric.sum(1).max().item() <= 1 + EPSILON return barycentric.to(device, dtype=dtype)", 
"[0, 5, 4], [2, 6, 7], [3, 2, 7], [1, 3, 5], [3,", "all_cross = torch.cross(d_vs, d_f, dim=2) all_dots = torch.einsum('nd,nad->na', normals, all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3)", "select] - triangle[:, (select + 1) % 3] all_cross = torch.cross(d_vs, d_f, dim=2)", "place def to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T, float]]: ref = meshes[0]", "3], [4, 5, 6], [5, 7, 6], [0, 1, 5], [0, 5, 4],", "weights: T) -> T: if type(mesh) is not T: triangles: T = mesh[0][mesh[1]]", "def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) -> T_Mesh: vs, _, _, faces = igl.remove_duplicate_vertices(*mesh, epsilon)", "igl def scale_all(*values: T): max_val = max([val.max().item() for val in values]) min_val =", "= torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles = torch.acos_(all_cos) all_angles = all_angles.sum() return (all_angles -", "vs = vs.clone() vs = to_center(vs) norm = vs.norm(2, dim=1).max() vs *= scale", "dim=1) # assert barycentric.sum(1).max().item() <= 1 + EPSILON return barycentric.to(device, dtype=dtype) def from_barycentric(mesh:", "= torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos = torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles = torch.acos_(all_cos) all_angles", "all_vecs / all_vecs.norm(2, 1)[:, None] all_vecs = torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos = torch.einsum('nd,nd->n',", "<= 1 + EPSILON return barycentric.to(device, dtype=dtype) def from_barycentric(mesh: Union[T_Mesh, T], face_ids: T,", "0, 0], [0, d, 0], [w, d, 0], [0, 0, h], [w, 0,", "max_range) for mesh in meshes: vs_, faces_ = scale_from_ref(mesh, center, scale) meshes_.append(vs_ if", "meshes_.append(vs_ if faces_ is remove_me else (vs_, faces_)) if len(meshes_) == 1: meshes_", "igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh) -> T: gc = igl.gaussian_curvature(*mesh) return", "return vs, faces def to_unit_cube(*meshes: T_Mesh_T, 
scale=1, in_place: bool = True) -> Tuple[Union[T_Mesh_T,", "import EPSILON import igl def scale_all(*values: T): max_val = max([val.max().item() for val in", "def clone(*tensors: Union[T, TS]) -> Union[TS, T_Mesh]: out = [] for t in", "= [[0, 0, 0], [w, 0, 0], [0, d, 0], [w, d, 0],", "return mesh[0].mean(0) def to_center(vs): max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] center = (max_vals", "def to(tensors, device: D) -> Union[T_Mesh, TS, T]: out = [] for tensor", "v + 1 uvw = torch.stack([u, v, w], dim=len(shape)) return uvw def get_sampled_fe(fe:", "mesh if vs_faces.shape[-1] == 2: vs_faces = torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)),", "0 meshes = [(mesh, remove_me) if type(mesh) is T else mesh for mesh", "int, face_areas: TN = None, sample_s: SampleBy = SampleBy.HYB) -> TNS: vs, faces", "in values] if len(values) == 1: return values[0] return values def get_faces_normals(mesh: Union[T_Mesh,", "torch.float32, torch.float32, torch.float32) def principal_curvature(mesh: T_Mesh) -> TS: out = igl.principal_curvature(*mesh) min_dir, max_dir,", "uvw = sample_uvw([faces.shape[0], num_samples], vs.device) samples = torch.einsum('fad,fna->fnd', vs[faces], uvw) return samples, uvw", "= fe[face_ids] else: vs_ids = mesh[1][face_ids] fe_unrolled = fe[vs_ids] fe_iner = torch.einsum('sad,sa->sd', fe_unrolled,", "* norm ** -1 return vs, faces def scale_from_ref(mesh: T_Mesh, center: T, scale:", "faces @igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh, query: T) -> T: query = query.cpu().numpy() return", "= torch.einsum('nd,nad->na', normals, all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3) return is_over def igl_prepare(*dtypes): def decoder(func):", "samples = torch.einsum('fad,fna->fnd', vs[faces], uvw) return samples, uvw class SampleBy(Enum): AREAS = 0", "def get_faces_normals(mesh: Union[T_Mesh, T]) -> T: if type(mesh) is not T: vs, faces", "in meshes] vs, faces = meshes[0] max_vals = 
vs.max(0)[0] min_vals = vs.min(0)[0] max_range", "all_vecs.norm(2, 1)[:, None] all_vecs = torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos = torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1])", "vs.shape[0] < num_samples: chosen_faces_inds = torch.arange(vs.shape[0]) else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds]", "chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0) chosen_faces = faces[chosen_faces_inds] uvw = sample_uvw([num_samples], vs.device) samples =", "device) to_torch = to_torch_singe if len(dtypes) == 1 else to_torch_multi return wrapper def", "dim=0) chosen_faces = faces[chosen_faces_inds] uvw = sample_uvw([num_samples], vs.device) samples = torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces])", "not T: vs, faces = mesh vs_faces = vs[faces] else: vs_faces = mesh", "uvw = None if vs.shape[0] < num_samples: chosen_faces_inds = torch.arange(vs.shape[0]) else: chosen_faces_inds =", "= vs[edges_ind] return torch.norm(edges[:, 0] - edges[:, 1], 2, dim=1) # in place", "boundary_indices, boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy() check, uv = igl.lscm(*mesh, boundary_indices, boundary_coordinates) return uv", "in meshes: vs_, faces_ = scale_from_ref(mesh, center, scale) meshes_.append(vs_ if faces_ is remove_me", "vs.min(0)[0] max_range = (max_vals - min_vals).max() / 2 center = (max_vals + min_vals)", "= vs alpha = compute_face_areas(triangles)[0] / areas triangles[:, ind] = recover[:, ind] return", "vs.max(0)[0] min_vals = vs.min(0)[0] center = (max_vals + min_vals) / 2 vs -=", "return params def create_mapper(mask: T) -> T: mapper = torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) -", "None) -> T: vs, faces = mesh if edges_ind is None: edges_ind =", "-u[mask] + 1, -v[mask] + 1 w = -u - v + 1", "/ 2 center = (max_vals + min_vals) / 2 meshes_ = [] scale", "T = mesh to_squeeze = weights.dim() == 1 if to_squeeze: weights = 
weights.unsqueeze(0)", "if sample_s == SampleBy.HYB: chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0) chosen_faces = faces[chosen_faces_inds] uvw =", "1e-8) return igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh) -> T: gc =", "-> Union[T_Mesh, TS, T]: out = [] for tensor in tensors: if type(tensor)", "@igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32) def principal_curvature(mesh: T_Mesh) -> TS: out = igl.principal_curvature(*mesh) min_dir,", "return edges def edge_lengths(mesh: T_Mesh, edges_ind: TN = None) -> T: vs, faces", "= out return min_dir, max_dir, min_val, max_val def get_inside_outside(points: T, mesh: T_Mesh) ->", "def to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T, float]]: ref = meshes[0] center", "areas, _ = compute_face_areas(triangles) recover = triangles.clone() barycentric = [compute_barycentric(i) for i in", "True) -> T_Mesh: vs, faces = mesh if not in_place: vs = vs.clone()", "in meshes: face_normals = get_faces_normals(mesh) if not face_normals[:, 2].gt(0).all(): return False return True", "def get_edges_ind(mesh: T_Mesh) -> T: vs, faces = mesh raw_edges = torch.cat([faces[:, [i,", "1, 5], [0, 5, 4], [2, 6, 7], [3, 2, 7], [1, 3,", "d_vs = vs_mid[:, None, :] - triangle d_f = triangle[:, select] - triangle[:,", "fe_iner def sample_on_faces(mesh: T_Mesh, num_samples: int) -> TS: vs, faces = mesh uvw", "dtype in zip(result, dtypes)] return decoder @igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh, num_faces: int): if", "= raw_edges[0].cpu().numpy() edges = {(int(edge[0]), int(edge[1])) for edge in raw_edges} edges = torch.tensor(list(edges),", "torch.int64) def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) -> T_Mesh: vs, _, _, faces = igl.remove_duplicate_vertices(*mesh,", ":] - triangle d_f = triangle[:, select] - triangle[:, (select + 1) %", "T: vs, faces = mesh if edges_ind is None: edges_ind = 
get_edges_ind(mesh) edges", "face_normals = face_normals / face_areas_[:, None] face_areas = 0.5 * face_areas return face_areas,", "T_Mesh, num_samples: int, face_areas: TN = None, sample_s: SampleBy = SampleBy.HYB) -> TNS:", "(all_angles - 2 * np.pi).abs() < EPSILON def vs_over_triangle(vs_mid: T, triangle: T, normals=None)", "import * from constants import EPSILON import igl def scale_all(*values: T): max_val =", "= recover[:, ind] return alpha device, dtype = vs.device, vs.dtype vs = vs.to(device,", "get_faces_normals(mesh: Union[T_Mesh, T]) -> T: if type(mesh) is not T: vs, faces =", "else: out.append(clone(*t)) return out def get_box(w: float, h: float, d: float) -> T_Mesh:", "features] return samples, face_ids, uvw def find_barycentric(vs: T, triangles: T) -> T: def", "face_areas = torch.norm(face_normals, p=2, dim=1) face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] = 1 face_normals", "return face_areas, face_normals def check_sign_area(*meshes: T_Mesh) -> bool: for mesh in meshes: face_normals", "num_faces)[1:3] @igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh) -> T: gc = igl.gaussian_curvature(*mesh) return gc @igl_prepare(torch.float32)", "center_ind: int, select: T) -> bool: vs, _ = mesh all_vecs = vs[select]", "find_barycentric(vs: T, triangles: T) -> T: def compute_barycentric(ind): triangles[:, ind] = vs alpha", "result = func((vs, faces), *args[1:], **kwargs) return to_torch(result, device) to_torch = to_torch_singe if", "fe.dim() == 1: fe = fe.unsqueeze(1) if uvw is None: fe_iner = fe[face_ids]", "center = (max_vals + min_vals) / 2 vs -= center[None, :] return vs", "all_angles = torch.acos_(all_cos) all_angles = all_angles.sum() return (all_angles - 2 * np.pi).abs() <", "scale=1, in_place: bool = True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T, float]]: remove_me =", "to_numpy(*tensors: T) -> ARRAYS: params = [param.detach().cpu().numpy() if type(param) is T else param", "sample_on_mesh(mesh, 
num_samples, sample_s=sample_s) if len(features) > 0: samples = [samples] + [get_sampled_fe(fe, mesh,", "for val in values]) min_val = min([val.min().item() for val in values]) scale =", "remove_me) if type(mesh) is T else mesh for mesh in meshes] vs, faces", "= mesh if edges_ind is None: edges_ind = get_edges_ind(mesh) edges = vs[edges_ind] return", "vs, faces @igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh, query: T) -> T: query = query.cpu().numpy()", "[i, (i + 1) % 3]] for i in range(3)]).sort() raw_edges = raw_edges[0].cpu().numpy()", "func((vs, faces), *args[1:], **kwargs) return to_torch(result, device) to_torch = to_torch_singe if len(dtypes) ==", "== SampleBy.FACES or sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds = [torch.multinomial(weights, num_samples //", "_ = mesh all_vecs = vs[select] - vs[center_ind][None, :] all_vecs = all_vecs /", "winding_numbers = torch.from_numpy(winding_numbers) inside_outside = winding_numbers.lt(.5).float() * 2 - 1 return inside_outside.to(device) @igl_prepare(torch.float32)", "1 if to_squeeze: weights = weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd', triangles[face_ids],", "== 1: meshes_ = meshes_[0] return meshes_, (center, scale) def get_edges_ind(mesh: T_Mesh) ->", "values]) scale = max_val - min_val values = [(val - min_val) / scale", "params def create_mapper(mask: T) -> T: mapper = torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) - 1", "ref = meshes[0] center = ref[0].mean(0) ratio = edge_lengths(ref).mean().item() for mesh in meshes:", "scale_from_ref(mesh: T_Mesh, center: T, scale: float, in_place: bool = True) -> T_Mesh: vs,", "min([val.min().item() for val in values]) scale = max_val - min_val values = [(val", "= 0 FACES = 1 HYB = 2 def sample_on_mesh(mesh: T_Mesh, num_samples: int,", "{(int(edge[0]), int(edge[1])) for edge in raw_edges} edges = torch.tensor(list(edges), 
dtype=torch.int64, device=faces.device) return edges", "T, boundary_coordinates: T) -> T: boundary_indices, boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy() check, uv =", "TS: face_normals = get_faces_normals(mesh) face_areas = torch.norm(face_normals, p=2, dim=1) face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_,", "type(t) is T: out.append(t.clone()) else: out.append(clone(*t)) return out def get_box(w: float, h: float,", "SampleBy = SampleBy.HYB) -> TNS: vs, faces = mesh if faces is None:", "is T else param for param in tensors] return params def create_mapper(mask: T)", "T_Mesh, center_ind: int, select: T) -> bool: vs, _ = mesh all_vecs =", "-u - v + 1 uvw = torch.stack([u, v, w], dim=len(shape)) return uvw", "weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds = [torch.multinomial(weights, num_samples // len(weighted_p), replacement=True) for weights in weighted_p]", "decoder(func): def wrapper(*args, **kwargs): mesh = args[0] device, dtype = mesh[0].device, mesh[0].dtype vs,", "boundary_coordinates.numpy() check, uv = igl.lscm(*mesh, boundary_indices, boundary_coordinates) return uv def interpulate_vs(mesh: T_Mesh, faces_inds:", "igl_prepare(*dtypes): def decoder(func): def wrapper(*args, **kwargs): mesh = args[0] device, dtype = mesh[0].device,", "= torch.tensor(list(edges), dtype=torch.int64, device=faces.device) return edges def edge_lengths(mesh: T_Mesh, edges_ind: TN = None)", "[(val - min_val) / scale for val in values] if len(values) == 1:", "device)) else: out.append(tensor) if len(tensors) == 1: return out[0] else: return tuple(out) def", "* weights[:, :, None] return vs.sum(1) def sample_uvw(shape, device: D): u, v =", "meshes[0] center = ref[0].mean(0) ratio = edge_lengths(ref).mean().item() for mesh in meshes: vs, _", "d, h], [w, d, h]] faces = [[0, 2, 1], [1, 2, 3],", "sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) 
chosen_faces_inds = [torch.multinomial(weights, num_samples // len(weighted_p), replacement=True) for", "vs_faces = torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals = torch.cross(vs_faces[:, 1,", "meshes_ = [] scale = float(scale / max_range) for mesh in meshes: vs_,", "Tuple[T, float]]: ref = meshes[0] center = ref[0].mean(0) ratio = edge_lengths(ref).mean().item() for mesh", "boundary_indices, boundary_coordinates) return uv def interpulate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T:", "vs.device, vs.dtype vs = vs.to(device, dtype=torch.float64) triangles = triangles.to(device, dtype=torch.float64) areas, _ =", "face_normals def check_sign_area(*meshes: T_Mesh) -> bool: for mesh in meshes: face_normals = get_faces_normals(mesh)", "...]], Tuple[T, float]]: remove_me = 0 meshes = [(mesh, remove_me) if type(mesh) is", "weighting) return normals @igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) -> T_Mesh: vs, _,", "num_samples], vs.device) samples = torch.einsum('fad,fna->fnd', vs[faces], uvw) return samples, uvw class SampleBy(Enum): AREAS", "EPSILON return barycentric.to(device, dtype=dtype) def from_barycentric(mesh: Union[T_Mesh, T], face_ids: T, weights: T) ->", "None] all_vecs = torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos = torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles =", "all_vecs[1:], all_vecs[:-1]) all_angles = torch.acos_(all_cos) all_angles = all_angles.sum() return (all_angles - 2 *", "T, triangle: T, normals=None) -> T: if vs_mid.dim() == 1: vs_mid = vs_mid.unsqueeze(0)", "in weighted_p] if sample_s == SampleBy.HYB: chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0) chosen_faces = faces[chosen_faces_inds]", "T_Mesh: vs, _, _, faces = igl.remove_duplicate_vertices(*mesh, epsilon) return vs, faces @igl_prepare(torch.float32) def", "1 face_normals = face_normals / face_areas_[:, None] face_areas 
= 0.5 * face_areas return", "def check_sign_area(*meshes: T_Mesh) -> bool: for mesh in meshes: face_normals = get_faces_normals(mesh) if", "meshes: vs_, faces_ = scale_from_ref(mesh, center, scale) meshes_.append(vs_ if faces_ is remove_me else", "in values]) min_val = min([val.min().item() for val in values]) scale = max_val -", "None: fe_iner = fe[face_ids] else: vs_ids = mesh[1][face_ids] fe_unrolled = fe[vs_ids] fe_iner =", "points.numpy() vs, faces = mesh[0].numpy(), mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers =", "T_Mesh, boundary_indices: T, boundary_coordinates: T) -> T: boundary_indices, boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy() check,", "boundary_indices.numpy(), boundary_coordinates.numpy() check, uv = igl.lscm(*mesh, boundary_indices, boundary_coordinates) return uv def interpulate_vs(mesh: T_Mesh,", "len(weighted_p), replacement=True) for weights in weighted_p] if sample_s == SampleBy.HYB: chosen_faces_inds = torch.cat(chosen_faces_inds,", "T: if vs_mid.dim() == 1: vs_mid = vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0) if normals", "if to_squeeze: # fe_iner = fe_iner.squeeze_(1) return fe_iner def sample_on_faces(mesh: T_Mesh, num_samples: int)", "return inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh: T_Mesh, boundary_indices: T, boundary_coordinates: T) -> T: boundary_indices,", "= torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) - 1 mapper[mask] = torch.arange(mask.sum().item(), device=mask.device) return mapper def", "fe_unrolled, uvw) # if to_squeeze: # fe_iner = fe_iner.squeeze_(1) return fe_iner def sample_on_faces(mesh:", "T) -> T: mapper = torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) - 1 mapper[mask] = torch.arange(mask.sum().item(),", "mesh, face_ids, uvw) for fe in features] return samples, face_ids, uvw def find_barycentric(vs:", "(i + 1) % 3]] for i in range(3)]).sort() raw_edges = 
raw_edges[0].cpu().numpy() edges", "for tensor in tensors: if type(tensor) is T: out.append(tensor.to(device, )) elif type(tensor) is", "SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds = [torch.multinomial(weights, num_samples // len(weighted_p), replacement=True) for weights in", "tensors] return params def create_mapper(mask: T) -> T: mapper = torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device)", "dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def normalize(t: T): t = t / t.norm(2, dim=1)[:, None]", "[1, 3, 5], [3, 7, 5], [0, 4, 2], [2, 4, 6]] return", "torch.from_numpy(winding_numbers) inside_outside = winding_numbers.lt(.5).float() * 2 - 1 return inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh:", "= [samples] + [get_sampled_fe(fe, mesh, face_ids, uvw) for fe in features] return samples,", "T) -> T: if type(mesh) is not T: triangles: T = mesh[0][mesh[1]] else:", "5, 6], [5, 7, 6], [0, 1, 5], [0, 5, 4], [2, 6,", "torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds] else: weighted_p = [] if sample_s == SampleBy.AREAS or", "Union[T, TS]) -> Union[TS, T_Mesh]: out = [] for t in tensors: if", "= 0 meshes = [(mesh, remove_me) if type(mesh) is T else mesh for", "boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy() check, uv = igl.lscm(*mesh, boundary_indices, boundary_coordinates) return uv def", "values]) min_val = min([val.min().item() for val in values]) scale = max_val - min_val", "faces = mesh vs_faces = vs[faces] else: vs_faces = mesh if vs_faces.shape[-1] ==", "val in values]) scale = max_val - min_val values = [(val - min_val)", "T) -> T: def compute_barycentric(ind): triangles[:, ind] = vs alpha = compute_face_areas(triangles)[0] /", "vs.norm(2, dim=1).max() vs *= scale * norm ** -1 return vs, faces def", "min_dir, max_dir, min_val, max_val = out return min_dir, max_dir, min_val, max_val def 
get_inside_outside(points:", "out.append(to(list(tensor), device)) else: out.append(tensor) if len(tensors) == 1: return out[0] else: return tuple(out)", "return out[0] else: return tuple(out) def clone(*tensors: Union[T, TS]) -> Union[TS, T_Mesh]: out", "-> T: gc = igl.gaussian_curvature(*mesh) return gc @igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh, weighting: int", "*args[1:], **kwargs) return to_torch(result, device) to_torch = to_torch_singe if len(dtypes) == 1 else", "1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals = torch.cross(vs_faces[:, 1, :] - vs_faces[:, 0, :],", "T) -> T: boundary_indices, boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy() check, uv = igl.lscm(*mesh, boundary_indices,", "-> T: vs, faces = mesh if edges_ind is None: edges_ind = get_edges_ind(mesh)", "T]) -> T: if type(mesh) is not T: vs, faces = mesh vs_faces", "[w, 0, 0], [0, d, 0], [w, d, 0], [0, 0, h], [w,", "# assert barycentric.sum(1).max().item() <= 1 + EPSILON return barycentric.to(device, dtype=dtype) def from_barycentric(mesh: Union[T_Mesh,", "(select + 1) % 3] all_cross = torch.cross(d_vs, d_f, dim=2) all_dots = torch.einsum('nd,nad->na',", "from constants import EPSILON import igl def scale_all(*values: T): max_val = max([val.max().item() for", "constants import EPSILON import igl def scale_all(*values: T): max_val = max([val.max().item() for val", "return (all_angles - 2 * np.pi).abs() < EPSILON def vs_over_triangle(vs_mid: T, triangle: T,", "T: def compute_barycentric(ind): triangles[:, ind] = vs alpha = compute_face_areas(triangles)[0] / areas triangles[:,", "TS, T]: out = [] for tensor in tensors: if type(tensor) is T:", "vs, faces = mesh uvw = sample_uvw([faces.shape[0], num_samples], vs.device) samples = torch.einsum('fad,fna->fnd', vs[faces],", "= min([val.min().item() for val in values]) scale = max_val - min_val values =", "return False return True def to_numpy(*tensors: T) -> ARRAYS: params = 
[param.detach().cpu().numpy() if", "2, 1], [1, 2, 3], [4, 5, 6], [5, 7, 6], [0, 1,", "for mesh in meshes: vs_, faces_ = scale_from_ref(mesh, center, scale) meshes_.append(vs_ if faces_", "vs -= center[None, :] return vs def to_unit_sphere(mesh: T_Mesh, in_place: bool = True,", "for t in tensors: if type(t) is T: out.append(t.clone()) else: out.append(clone(*t)) return out", "= torch.acos_(all_cos) all_angles = all_angles.sum() return (all_angles - 2 * np.pi).abs() < EPSILON", "= (max_vals - min_vals).max() / 2 center = (max_vals + min_vals) / 2", "tuple(out) def clone(*tensors: Union[T, TS]) -> Union[TS, T_Mesh]: out = [] for t", "torch.arange(vs.shape[0]) else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds] else: weighted_p = [] if", "barycentric.to(device, dtype=dtype) def from_barycentric(mesh: Union[T_Mesh, T], face_ids: T, weights: T) -> T: if", "type(mesh) is not T: triangles: T = mesh[0][mesh[1]] else: triangles: T = mesh", "= max_val - min_val values = [(val - min_val) / scale for val", "0)] = 1 face_normals = face_normals / face_areas_[:, None] face_areas = 0.5 *", "if len(values) == 1: return values[0] return values def get_faces_normals(mesh: Union[T_Mesh, T]) ->", "not face_normals[:, 2].gt(0).all(): return False return True def to_numpy(*tensors: T) -> ARRAYS: params", "return uvw def get_sampled_fe(fe: T, mesh: T_Mesh, face_ids: T, uvw: TN) -> T:", "[get_sampled_fe(fe, mesh, face_ids, uvw) for fe in features] return samples, face_ids, uvw def", "else param for param in tensors] return params def create_mapper(mask: T) -> T:", "def get_inside_outside(points: T, mesh: T_Mesh) -> T: device = points.device points = points.numpy()", "T: # to_squeeze = if fe.dim() == 1: fe = fe.unsqueeze(1) if uvw", "vs -= center[None, :].to(vs.device) vs /= ratio if len(meshes) == 1: meshes =", "type(tensors) is List: out.append(to(list(tensor), device)) else: out.append(tensor) if len(tensors) == 1: return out[0]", 
"not in_place: vs = vs.clone() vs -= center[None, :] vs *= scale return", ":] - vs_faces[:, 0, :], vs_faces[:, 2, :] - vs_faces[:, 1, :]) return", "type(tensor) is T: out.append(tensor.to(device, )) elif type(tensor) is tuple or type(tensors) is List:", "vs = torch.einsum('nad,na->nd', triangles[face_ids], weights) if to_squeeze: vs = vs.squeeze(0) return vs def", "return vs def check_circle_angles(mesh: T_Mesh, center_ind: int, select: T) -> bool: vs, _", "= igl.per_vertex_normals(*mesh, weighting) return normals @igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) -> T_Mesh:", "face_areas, _ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas / face_areas.sum()) if sample_s ==", "decoder @igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh, num_faces: int): if mesh[1].shape[0] <= num_faces: return mesh", "Union[TS, T_Mesh]: out = [] for t in tensors: if type(t) is T:", "False return True def to_numpy(*tensors: T) -> ARRAYS: params = [param.detach().cpu().numpy() if type(param)", "values] if len(values) == 1: return values[0] return values def get_faces_normals(mesh: Union[T_Mesh, T])", "mapper[mask] = torch.arange(mask.sum().item(), device=mask.device) return mapper def mesh_center(mesh: T_Mesh): return mesh[0].mean(0) def to_center(vs):", "= get_faces_normals(mesh) if not face_normals[:, 2].gt(0).all(): return False return True def to_numpy(*tensors: T)", "uvw = sample_uvw([num_samples], vs.device) samples = torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return samples, chosen_faces_inds, uvw", "triangles: T = mesh[0][mesh[1]] else: triangles: T = mesh to_squeeze = weights.dim() ==", "all_dots = torch.einsum('nd,nad->na', normals, all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3) return is_over def igl_prepare(*dtypes): def", "== 1 if to_squeeze: weights = weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd',", "def 
per_vertex_normals_igl(mesh: T_Mesh, weighting: int = 0) -> T: normals = igl.per_vertex_normals(*mesh, weighting)", "torch.tensor(faces, dtype=torch.int64) def normalize(t: T): t = t / t.norm(2, dim=1)[:, None] return", "fe.unsqueeze(1) if uvw is None: fe_iner = fe[face_ids] else: vs_ids = mesh[1][face_ids] fe_unrolled", "0], [w, 0, 0], [0, d, 0], [w, d, 0], [0, 0, h],", "if type(mesh) is not T: triangles: T = mesh[0][mesh[1]] else: triangles: T =", "= torch.rand(*shape, device=device), torch.rand(*shape, device=device) mask = (u + v).gt(1) u[mask], v[mask] =", "meshes = [(mesh, remove_me) if type(mesh) is T else mesh for mesh in", "to_torch_singe if len(dtypes) == 1 else to_torch_multi return wrapper def to_torch_singe(result, device): return", "sample_uvw(shape, device: D): u, v = torch.rand(*shape, device=device), torch.rand(*shape, device=device) mask = (u", "normals, all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3) return is_over def igl_prepare(*dtypes): def decoder(func): def wrapper(*args,", "@igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh, weighting: int = 0) -> T: normals = igl.per_vertex_normals(*mesh,", "= -u - v + 1 uvw = torch.stack([u, v, w], dim=len(shape)) return", "faces def scale_from_ref(mesh: T_Mesh, center: T, scale: float, in_place: bool = True) ->", "= torch.arange(mask.sum().item(), device=mask.device) return mapper def mesh_center(mesh: T_Mesh): return mesh[0].mean(0) def to_center(vs): max_vals", "triangles.to(device, dtype=torch.float64) areas, _ = compute_face_areas(triangles) recover = triangles.clone() barycentric = [compute_barycentric(i) for", "tensors: if type(t) is T: out.append(t.clone()) else: out.append(clone(*t)) return out def get_box(w: float,", "int = 0) -> T: normals = igl.per_vertex_normals(*mesh, weighting) return normals @igl_prepare(torch.float32, torch.int64)", "winding_numbers = igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers = torch.from_numpy(winding_numbers) 
inside_outside = winding_numbers.lt(.5).float() * 2", "[2, 4, 6]] return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def normalize(t: T): t =", "= args[0] device, dtype = mesh[0].device, mesh[0].dtype vs, faces = to_numpy(*mesh) result =", ":]) return face_normals def compute_face_areas(mesh: Union[T_Mesh, T]) -> TS: face_normals = get_faces_normals(mesh) face_areas", "d_f, dim=2) all_dots = torch.einsum('nd,nad->na', normals, all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3) return is_over def", "T_Mesh_T, scale=1, in_place: bool = True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T, float]]: remove_me", "if mesh[1].shape[0] <= num_faces: return mesh vs, faces, _ = igl.remove_duplicates(*mesh, 1e-8) return", "= -u[mask] + 1, -v[mask] + 1 w = -u - v +", "*= scale * norm ** -1 return vs, faces def scale_from_ref(mesh: T_Mesh, center:", "return torch.norm(edges[:, 0] - edges[:, 1], 2, dim=1) # in place def to_unit_edge(*meshes:", "face_ids: T, weights: T) -> T: if type(mesh) is not T: triangles: T", "device=device) mask = (u + v).gt(1) u[mask], v[mask] = -u[mask] + 1, -v[mask]", "= [] scale = float(scale / max_range) for mesh in meshes: vs_, faces_", "= compute_face_areas(triangles)[0] / areas triangles[:, ind] = recover[:, ind] return alpha device, dtype", "-> T_Mesh: vs = [[0, 0, 0], [w, 0, 0], [0, d, 0],", "replacement=True) for weights in weighted_p] if sample_s == SampleBy.HYB: chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0)", "= meshes[0] center = ref[0].mean(0) ratio = edge_lengths(ref).mean().item() for mesh in meshes: vs,", "return mapper def mesh_center(mesh: T_Mesh): return mesh[0].mean(0) def to_center(vs): max_vals = vs.max(0)[0] min_vals", "sample from pc uvw = None if vs.shape[0] < num_samples: chosen_faces_inds = torch.arange(vs.shape[0])", "mesh all_vecs = vs[select] - vs[center_ind][None, :] all_vecs = all_vecs / all_vecs.norm(2, 1)[:,", "len(dtypes) == 1 else to_torch_multi return wrapper 
def to_torch_singe(result, device): return torch.from_numpy(result).to(device, dtype=dtypes[0])", "return fe_iner def sample_on_faces(mesh: T_Mesh, num_samples: int) -> TS: vs, faces = mesh", "= igl.lscm(*mesh, boundary_indices, boundary_coordinates) return uv def interpulate_vs(mesh: T_Mesh, faces_inds: T, weights: T)", "1) % 3] all_cross = torch.cross(d_vs, d_f, dim=2) all_dots = torch.einsum('nd,nad->na', normals, all_cross)", "dtype = mesh[0].device, mesh[0].dtype vs, faces = to_numpy(*mesh) result = func((vs, faces), *args[1:],", "triangles = triangles.to(device, dtype=torch.float64) areas, _ = compute_face_areas(triangles) recover = triangles.clone() barycentric =", "or sample_s == SampleBy.HYB: weighted_p.append(torch.ones(mesh[1].shape[0], device=mesh[0].device)) chosen_faces_inds = [torch.multinomial(weights, num_samples // len(weighted_p), replacement=True)", "compute_face_areas(triangles)[0] / areas triangles[:, ind] = recover[:, ind] return alpha device, dtype =", "T, mesh: T_Mesh, face_ids: T, uvw: TN) -> T: # to_squeeze = if", "if normals is None: _, normals = compute_face_areas(triangle) select = torch.arange(3) d_vs =", "if type(t) is T: out.append(t.clone()) else: out.append(clone(*t)) return out def get_box(w: float, h:", "query = query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32) def principal_curvature(mesh: T_Mesh)", "for weights in weighted_p] if sample_s == SampleBy.HYB: chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0) chosen_faces", "in_place: vs = vs.clone() vs -= center[None, :] vs *= scale return vs,", "create_mapper(mask: T) -> T: mapper = torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) - 1 mapper[mask] =", "return samples, face_ids, uvw def find_barycentric(vs: T, triangles: T) -> T: def compute_barycentric(ind):", "1], [1, 2, 3], [4, 5, 6], [5, 7, 6], [0, 1, 5],", "is List: out.append(to(list(tensor), device)) 
else: out.append(tensor) if len(tensors) == 1: return out[0] else:", "T_Mesh: vs, faces = mesh if not in_place: vs = vs.clone() vs -=", "center = ref[0].mean(0) ratio = edge_lengths(ref).mean().item() for mesh in meshes: vs, _ =", "SampleBy, *features: T) -> Union[T, TS]: samples, face_ids, uvw = sample_on_mesh(mesh, num_samples, sample_s=sample_s)", "torch.cross(d_vs, d_f, dim=2) all_dots = torch.einsum('nd,nad->na', normals, all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3) return is_over", "vs[select] - vs[center_ind][None, :] all_vecs = all_vecs / all_vecs.norm(2, 1)[:, None] all_vecs =", "all_angles.sum() return (all_angles - 2 * np.pi).abs() < EPSILON def vs_over_triangle(vs_mid: T, triangle:", "-= center[None, :] vs *= scale return vs, faces def to_unit_cube(*meshes: T_Mesh_T, scale=1,", "vs_, faces_ = scale_from_ref(mesh, center, scale) meshes_.append(vs_ if faces_ is remove_me else (vs_,", ":, None] return vs.sum(1) def sample_uvw(shape, device: D): u, v = torch.rand(*shape, device=device),", "dim=2) face_normals = torch.cross(vs_faces[:, 1, :] - vs_faces[:, 0, :], vs_faces[:, 2, :]", "T else param for param in tensors] return params def create_mapper(mask: T) ->", "return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32) def principal_curvature(mesh: T_Mesh) -> TS: out", "None] face_areas = 0.5 * face_areas return face_areas, face_normals def check_sign_area(*meshes: T_Mesh) ->", "fe_iner = fe_iner.squeeze_(1) return fe_iner def sample_on_faces(mesh: T_Mesh, num_samples: int) -> TS: vs,", "edge_lengths(ref).mean().item() for mesh in meshes: vs, _ = mesh vs -= center[None, :].to(vs.device)", "[torch.multinomial(weights, num_samples // len(weighted_p), replacement=True) for weights in weighted_p] if sample_s == SampleBy.HYB:", "torch.acos_(all_cos) all_angles = all_angles.sum() return (all_angles - 2 * np.pi).abs() < EPSILON def", "faces, num_faces)[1:3] 
@igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh) -> T: gc = igl.gaussian_curvature(*mesh) return gc", "to_squeeze: weights = weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd', triangles[face_ids], weights) if", "torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles = torch.acos_(all_cos) all_angles = all_angles.sum() return (all_angles - 2", "TN = None, sample_s: SampleBy = SampleBy.HYB) -> TNS: vs, faces = mesh", "norm = vs.norm(2, dim=1).max() vs *= scale * norm ** -1 return vs,", "EPSILON def vs_over_triangle(vs_mid: T, triangle: T, normals=None) -> T: if vs_mid.dim() == 1:", "[] if sample_s == SampleBy.AREAS or sample_s == SampleBy.HYB: if face_areas is None:", "for mesh in meshes: face_normals = get_faces_normals(mesh) if not face_normals[:, 2].gt(0).all(): return False", "AREAS = 0 FACES = 1 HYB = 2 def sample_on_mesh(mesh: T_Mesh, num_samples:", "scale_from_ref(mesh, center, scale) meshes_.append(vs_ if faces_ is remove_me else (vs_, faces_)) if len(meshes_)", "torch.float32) def principal_curvature(mesh: T_Mesh) -> TS: out = igl.principal_curvature(*mesh) min_dir, max_dir, min_val, max_val", "return wrapper def to_torch_singe(result, device): return torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result, device): return [torch.from_numpy(r).to(device,", "1 mapper[mask] = torch.arange(mask.sum().item(), device=mask.device) return mapper def mesh_center(mesh: T_Mesh): return mesh[0].mean(0) def", "-1 return vs, faces def scale_from_ref(mesh: T_Mesh, center: T, scale: float, in_place: bool", "True, scale=1.) 
-> T_Mesh: vs, faces = mesh if not in_place: vs =", "-> T: if type(mesh) is not T: triangles: T = mesh[0][mesh[1]] else: triangles:", "bool = True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T, float]]: remove_me = 0 meshes", "T: vs, faces = mesh vs_faces = vs[faces] else: vs_faces = mesh if", "= face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] = 1 face_normals = face_normals / face_areas_[:, None] face_areas", "type(mesh) is T else mesh for mesh in meshes] vs, faces = meshes[0]", "edge_lengths(mesh: T_Mesh, edges_ind: TN = None) -> T: vs, faces = mesh if", "val in values] if len(values) == 1: return values[0] return values def get_faces_normals(mesh:", "v = torch.rand(*shape, device=device), torch.rand(*shape, device=device) mask = (u + v).gt(1) u[mask], v[mask]", "i in range(3)]).sort() raw_edges = raw_edges[0].cpu().numpy() edges = {(int(edge[0]), int(edge[1])) for edge in", "== 1: return out[0] else: return tuple(out) def clone(*tensors: Union[T, TS]) -> Union[TS,", "to_torch(result, device) to_torch = to_torch_singe if len(dtypes) == 1 else to_torch_multi return wrapper", "2].gt(0).all(): return False return True def to_numpy(*tensors: T) -> ARRAYS: params = [param.detach().cpu().numpy()", "range(3)]).sort() raw_edges = raw_edges[0].cpu().numpy() edges = {(int(edge[0]), int(edge[1])) for edge in raw_edges} edges", "T) -> Union[T, TS]: samples, face_ids, uvw = sample_on_mesh(mesh, num_samples, sample_s=sample_s) if len(features)", "elif type(tensor) is tuple or type(tensors) is List: out.append(to(list(tensor), device)) else: out.append(tensor) if", "(center, ratio) def to(tensors, device: D) -> Union[T_Mesh, TS, T]: out = []", "= mesh[0].device, mesh[0].dtype vs, faces = to_numpy(*mesh) result = func((vs, faces), *args[1:], **kwargs)", "= all_dots.ge(0).long().sum(1).eq(3) return is_over def igl_prepare(*dtypes): def decoder(func): def wrapper(*args, **kwargs): mesh =", "faces = meshes[0] max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] 
max_range = (max_vals -", "igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers = torch.from_numpy(winding_numbers) inside_outside = winding_numbers.lt(.5).float() * 2 - 1", "query) @igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32) def principal_curvature(mesh: T_Mesh) -> TS: out = igl.principal_curvature(*mesh)", "= vs.min(0)[0] center = (max_vals + min_vals) / 2 vs -= center[None, :]", "T) -> ARRAYS: params = [param.detach().cpu().numpy() if type(param) is T else param for", "to_torch = to_torch_singe if len(dtypes) == 1 else to_torch_multi return wrapper def to_torch_singe(result,", "= torch.einsum('nad,na->nd', triangles[face_ids], weights) if to_squeeze: vs = vs.squeeze(0) return vs def check_circle_angles(mesh:", "SampleBy.HYB: chosen_faces_inds = torch.cat(chosen_faces_inds, dim=0) chosen_faces = faces[chosen_faces_inds] uvw = sample_uvw([num_samples], vs.device) samples", "= compute_face_areas(triangle) select = torch.arange(3) d_vs = vs_mid[:, None, :] - triangle d_f", "**kwargs) return to_torch(result, device) to_torch = to_torch_singe if len(dtypes) == 1 else to_torch_multi", "if vs_faces.shape[-1] == 2: vs_faces = torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2)", "max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] max_range = (max_vals - min_vals).max() / 2", "normals=None) -> T: if vs_mid.dim() == 1: vs_mid = vs_mid.unsqueeze(0) triangle = triangle.unsqueeze(0)", "+ min_vals) / 2 vs -= center[None, :] return vs def to_unit_sphere(mesh: T_Mesh,", "faces_ = scale_from_ref(mesh, center, scale) meshes_.append(vs_ if faces_ is remove_me else (vs_, faces_))", "device=mask.device) - 1 mapper[mask] = torch.arange(mask.sum().item(), device=mask.device) return mapper def mesh_center(mesh: T_Mesh): return", "weights) if to_squeeze: vs = vs.squeeze(0) return vs def check_circle_angles(mesh: T_Mesh, center_ind: int,", "chosen_faces_inds = torch.arange(vs.shape[0]) 
else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds] else: weighted_p =", "face_ids, uvw def find_barycentric(vs: T, triangles: T) -> T: def compute_barycentric(ind): triangles[:, ind]", "= [param.detach().cpu().numpy() if type(param) is T else param for param in tensors] return", "T) -> T: query = query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, torch.float32, torch.float32, torch.float32)", "scale) def get_edges_ind(mesh: T_Mesh) -> T: vs, faces = mesh raw_edges = torch.cat([faces[:,", "if type(mesh) is T else mesh for mesh in meshes] vs, faces =", "triangles[:, ind] = vs alpha = compute_face_areas(triangles)[0] / areas triangles[:, ind] = recover[:,", "3] all_cross = torch.cross(d_vs, d_f, dim=2) all_dots = torch.einsum('nd,nad->na', normals, all_cross) is_over =", "def find_barycentric(vs: T, triangles: T) -> T: def compute_barycentric(ind): triangles[:, ind] = vs", "else: triangles: T = mesh to_squeeze = weights.dim() == 1 if to_squeeze: weights", "= igl.gaussian_curvature(*mesh) return gc @igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh, weighting: int = 0) ->", "scale for val in values] if len(values) == 1: return values[0] return values", "(u + v).gt(1) u[mask], v[mask] = -u[mask] + 1, -v[mask] + 1 w", "def principal_curvature(mesh: T_Mesh) -> TS: out = igl.principal_curvature(*mesh) min_dir, max_dir, min_val, max_val =", "out.append(tensor) if len(tensors) == 1: return out[0] else: return tuple(out) def clone(*tensors: Union[T,", "/ areas triangles[:, ind] = recover[:, ind] return alpha device, dtype = vs.device,", "Union[T_Mesh, T]) -> T: if type(mesh) is not T: vs, faces = mesh", "dim=1).max() vs *= scale * norm ** -1 return vs, faces def scale_from_ref(mesh:", "vs_faces = mesh if vs_faces.shape[-1] == 2: vs_faces = torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1,", "T: gc = igl.gaussian_curvature(*mesh) return gc 
@igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh, weighting: int =", "per_vertex_normals_igl(mesh: T_Mesh, weighting: int = 0) -> T: normals = igl.per_vertex_normals(*mesh, weighting) return", "if sample_s == SampleBy.AREAS or sample_s == SampleBy.HYB: if face_areas is None: face_areas,", "else: weighted_p = [] if sample_s == SampleBy.AREAS or sample_s == SampleBy.HYB: if", "or sample_s == SampleBy.HYB: if face_areas is None: face_areas, _ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)]", "face_areas_ = face_areas.clone() face_areas_[torch.eq(face_areas_, 0)] = 1 face_normals = face_normals / face_areas_[:, None]", "= vs.max(0)[0] min_vals = vs.min(0)[0] center = (max_vals + min_vals) / 2 vs", "normals = igl.per_vertex_normals(*mesh, weighting) return normals @igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) ->", "1 + EPSILON return barycentric.to(device, dtype=dtype) def from_barycentric(mesh: Union[T_Mesh, T], face_ids: T, weights:", "* 2 - 1 return inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh: T_Mesh, boundary_indices: T, boundary_coordinates:", "6], [0, 1, 5], [0, 5, 4], [2, 6, 7], [3, 2, 7],", "* face_areas return face_areas, face_normals def check_sign_area(*meshes: T_Mesh) -> bool: for mesh in", "vs, _ = mesh all_vecs = vs[select] - vs[center_ind][None, :] all_vecs = all_vecs", "= [] for t in tensors: if type(t) is T: out.append(t.clone()) else: out.append(clone(*t))", "== 2: vs_faces = torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals =", "dim=1)[:, None] return t def interpolate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T:", "face_areas return face_areas, face_normals def check_sign_area(*meshes: T_Mesh) -> bool: for mesh in meshes:", "mesh[1].shape[0] <= num_faces: return mesh vs, faces, _ = igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs,", "= 
max([val.max().item() for val in values]) min_val = min([val.min().item() for val in values])", "compute_barycentric(ind): triangles[:, ind] = vs alpha = compute_face_areas(triangles)[0] / areas triangles[:, ind] =", "= compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas / face_areas.sum()) if sample_s == SampleBy.FACES or", "edges = vs[edges_ind] return torch.norm(edges[:, 0] - edges[:, 1], 2, dim=1) # in", "# sample from pc uvw = None if vs.shape[0] < num_samples: chosen_faces_inds =", "face_ids, uvw = sample_on_mesh(mesh, num_samples, sample_s=sample_s) if len(features) > 0: samples = [samples]", "min_val) / scale for val in values] if len(values) == 1: return values[0]", "to_squeeze: # fe_iner = fe_iner.squeeze_(1) return fe_iner def sample_on_faces(mesh: T_Mesh, num_samples: int) ->", "return meshes_, (center, scale) def get_edges_ind(mesh: T_Mesh) -> T: vs, faces = mesh", "else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds] else: weighted_p = [] if sample_s", "= torch.cross(d_vs, d_f, dim=2) all_dots = torch.einsum('nd,nad->na', normals, all_cross) is_over = all_dots.ge(0).long().sum(1).eq(3) return", "faces, points) winding_numbers = torch.from_numpy(winding_numbers) inside_outside = winding_numbers.lt(.5).float() * 2 - 1 return", "len(values) == 1: return values[0] return values def get_faces_normals(mesh: Union[T_Mesh, T]) -> T:", "1: meshes = meshes[0] return meshes, (center, ratio) def to(tensors, device: D) ->", "* np.pi).abs() < EPSILON def vs_over_triangle(vs_mid: T, triangle: T, normals=None) -> T: if", "vs, faces = meshes[0] max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] max_range = (max_vals", "T: if type(mesh) is not T: vs, faces = mesh vs_faces = vs[faces]", "T_Mesh, num_samples: int, sample_s: SampleBy, *features: T) -> Union[T, TS]: samples, face_ids, uvw", "mapper = torch.zeros(mask.shape[0], dtype=torch.int64, device=mask.device) - 1 
mapper[mask] = torch.arange(mask.sum().item(), device=mask.device) return mapper", "None: # sample from pc uvw = None if vs.shape[0] < num_samples: chosen_faces_inds", "-> T: def compute_barycentric(ind): triangles[:, ind] = vs alpha = compute_face_areas(triangles)[0] / areas", "samples = torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return samples, chosen_faces_inds, uvw def get_samples(mesh: T_Mesh, num_samples:", "is None: edges_ind = get_edges_ind(mesh) edges = vs[edges_ind] return torch.norm(edges[:, 0] - edges[:,", "if fe.dim() == 1: fe = fe.unsqueeze(1) if uvw is None: fe_iner =", "num_samples // len(weighted_p), replacement=True) for weights in weighted_p] if sample_s == SampleBy.HYB: chosen_faces_inds", "vs_over_triangle(vs_mid: T, triangle: T, normals=None) -> T: if vs_mid.dim() == 1: vs_mid =", "= igl.principal_curvature(*mesh) min_dir, max_dir, min_val, max_val = out return min_dir, max_dir, min_val, max_val", "if len(meshes) == 1: meshes = meshes[0] return meshes, (center, ratio) def to(tensors,", "winding_numbers.lt(.5).float() * 2 - 1 return inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh: T_Mesh, boundary_indices: T,", "1: return values[0] return values def get_faces_normals(mesh: Union[T_Mesh, T]) -> T: if type(mesh)", "[] for tensor in tensors: if type(tensor) is T: out.append(tensor.to(device, )) elif type(tensor)", "6], [5, 7, 6], [0, 1, 5], [0, 5, 4], [2, 6, 7],", "vs = vs.clone() vs -= center[None, :] vs *= scale return vs, faces", "max_val def get_inside_outside(points: T, mesh: T_Mesh) -> T: device = points.device points =", "face_areas, face_normals def check_sign_area(*meshes: T_Mesh) -> bool: for mesh in meshes: face_normals =", "center[None, :] vs *= scale return vs, faces def to_unit_cube(*meshes: T_Mesh_T, scale=1, in_place:", "Union[T_Mesh, TS, T]: out = [] for tensor in tensors: if type(tensor) is", "face_ids, uvw) for fe in features] return samples, face_ids, uvw def find_barycentric(vs: T,", 
"weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd', triangles[face_ids], weights) if to_squeeze: vs =", "= [(mesh, remove_me) if type(mesh) is T else mesh for mesh in meshes]", "values = [(val - min_val) / scale for val in values] if len(values)", "torch.arange(mask.sum().item(), device=mask.device) return mapper def mesh_center(mesh: T_Mesh): return mesh[0].mean(0) def to_center(vs): max_vals =", "sample_on_mesh(mesh: T_Mesh, num_samples: int, face_areas: TN = None, sample_s: SampleBy = SampleBy.HYB) ->", "= to_numpy(*mesh) result = func((vs, faces), *args[1:], **kwargs) return to_torch(result, device) to_torch =", "uvw: TN) -> T: # to_squeeze = if fe.dim() == 1: fe =", "samples, chosen_faces_inds, uvw def get_samples(mesh: T_Mesh, num_samples: int, sample_s: SampleBy, *features: T) ->", "_, _, faces = igl.remove_duplicate_vertices(*mesh, epsilon) return vs, faces @igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh,", "if len(meshes_) == 1: meshes_ = meshes_[0] return meshes_, (center, scale) def get_edges_ind(mesh:", "boundary_indices: T, boundary_coordinates: T) -> T: boundary_indices, boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy() check, uv", "-> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T, float]]: ref = meshes[0] center = ref[0].mean(0) ratio", "weights = weights.unsqueeze(0) face_ids = face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd', triangles[face_ids], weights) if to_squeeze:", "to_center(vs) norm = vs.norm(2, dim=1).max() vs *= scale * norm ** -1 return", "samples, face_ids, uvw = sample_on_mesh(mesh, num_samples, sample_s=sample_s) if len(features) > 0: samples =", "-> bool: vs, _ = mesh all_vecs = vs[select] - vs[center_ind][None, :] all_vecs", "uvw def get_sampled_fe(fe: T, mesh: T_Mesh, face_ids: T, uvw: TN) -> T: #", "T_Mesh, weighting: int = 0) -> T: normals = igl.per_vertex_normals(*mesh, weighting) return normals", "to_unit_cube(*meshes: T_Mesh_T, scale=1, 
in_place: bool = True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T, float]]:", "= torch.arange(vs.shape[0]) else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds] else: weighted_p = []", "len(features) > 0: samples = [samples] + [get_sampled_fe(fe, mesh, face_ids, uvw) for fe", "triangle[:, select] - triangle[:, (select + 1) % 3] all_cross = torch.cross(d_vs, d_f,", "= (max_vals + min_vals) / 2 vs -= center[None, :] return vs def", "- min_vals).max() / 2 center = (max_vals + min_vals) / 2 meshes_ =", "v).gt(1) u[mask], v[mask] = -u[mask] + 1, -v[mask] + 1 w = -u", "is T else mesh for mesh in meshes] vs, faces = meshes[0] max_vals", "2], [2, 4, 6]] return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def normalize(t: T): t", "0 FACES = 1 HYB = 2 def sample_on_mesh(mesh: T_Mesh, num_samples: int, face_areas:", "== 1 else to_torch_multi return wrapper def to_torch_singe(result, device): return torch.from_numpy(result).to(device, dtype=dtypes[0]) def", "T): max_val = max([val.max().item() for val in values]) min_val = min([val.min().item() for val", "min_dir, max_dir, min_val, max_val def get_inside_outside(points: T, mesh: T_Mesh) -> T: device =", "h], [0, d, h], [w, d, h]] faces = [[0, 2, 1], [1,", "face_normals = get_faces_normals(mesh) if not face_normals[:, 2].gt(0).all(): return False return True def to_numpy(*tensors:", "= meshes[0] max_vals = vs.max(0)[0] min_vals = vs.min(0)[0] max_range = (max_vals - min_vals).max()", "vs_faces.shape[-1] == 2: vs_faces = torch.cat( (vs_faces, torch.zeros(*vs_faces.shape[:2], 1, dtype=vs_faces.dtype, device=vs_faces.device)), dim=2) face_normals", "6, 7], [3, 2, 7], [1, 3, 5], [3, 7, 5], [0, 4,", "weighting: int = 0) -> T: normals = igl.per_vertex_normals(*mesh, weighting) return normals @igl_prepare(torch.float32,", "= triangle[:, select] - triangle[:, (select + 1) % 3] all_cross = torch.cross(d_vs,", "1], 2, dim=1) # in place 
def to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]],", "T, uvw: TN) -> T: # to_squeeze = if fe.dim() == 1: fe", "face_ids.unsqueeze(0) vs = torch.einsum('nad,na->nd', triangles[face_ids], weights) if to_squeeze: vs = vs.squeeze(0) return vs", "int, sample_s: SampleBy, *features: T) -> Union[T, TS]: samples, face_ids, uvw = sample_on_mesh(mesh,", "SampleBy.HYB) -> TNS: vs, faces = mesh if faces is None: # sample", "def to_numpy(*tensors: T) -> ARRAYS: params = [param.detach().cpu().numpy() if type(param) is T else", "Tuple[T, float]]: remove_me = 0 meshes = [(mesh, remove_me) if type(mesh) is T", "= [] if sample_s == SampleBy.AREAS or sample_s == SampleBy.HYB: if face_areas is", "T: vs, faces = mesh raw_edges = torch.cat([faces[:, [i, (i + 1) %", "from_barycentric(mesh: Union[T_Mesh, T], face_ids: T, weights: T) -> T: if type(mesh) is not", "= boundary_indices.numpy(), boundary_coordinates.numpy() check, uv = igl.lscm(*mesh, boundary_indices, boundary_coordinates) return uv def interpulate_vs(mesh:", "is_over def igl_prepare(*dtypes): def decoder(func): def wrapper(*args, **kwargs): mesh = args[0] device, dtype", "def interpolate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs = mesh[0][mesh[1][faces_inds]] vs", "= SampleBy.HYB) -> TNS: vs, faces = mesh if faces is None: #", "num_faces: int): if mesh[1].shape[0] <= num_faces: return mesh vs, faces, _ = igl.remove_duplicates(*mesh,", "[3, 7, 5], [0, 4, 2], [2, 4, 6]] return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces,", "T: out.append(t.clone()) else: out.append(clone(*t)) return out def get_box(w: float, h: float, d: float)", "2 def sample_on_mesh(mesh: T_Mesh, num_samples: int, face_areas: TN = None, sample_s: SampleBy =", "is not T: triangles: T = mesh[0][mesh[1]] else: triangles: T = mesh to_squeeze", "float, d: float) -> T_Mesh: vs = [[0, 0, 0], [w, 0, 0],", "faces = mesh raw_edges = torch.cat([faces[:, [i, (i + 1) % 3]] for", "get_box(w: float, h: float, d: float) -> 
T_Mesh: vs = [[0, 0, 0],", "check_sign_area(*meshes: T_Mesh) -> bool: for mesh in meshes: face_normals = get_faces_normals(mesh) if not", "def to_torch_multi(result, device): return [torch.from_numpy(r).to(device, dtype=dtype) for r, dtype in zip(result, dtypes)] return", "= [torch.multinomial(weights, num_samples // len(weighted_p), replacement=True) for weights in weighted_p] if sample_s ==", "/ all_vecs.norm(2, 1)[:, None] all_vecs = torch.cat([all_vecs, all_vecs[:1]], dim=0) all_cos = torch.einsum('nd,nd->n', all_vecs[1:],", "else: vs_ids = mesh[1][face_ids] fe_unrolled = fe[vs_ids] fe_iner = torch.einsum('sad,sa->sd', fe_unrolled, uvw) #", "= points.device points = points.numpy() vs, faces = mesh[0].numpy(), mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs,", "norm ** -1 return vs, faces def scale_from_ref(mesh: T_Mesh, center: T, scale: float,", "type(mesh) is not T: vs, faces = mesh vs_faces = vs[faces] else: vs_faces", "select = torch.arange(3) d_vs = vs_mid[:, None, :] - triangle d_f = triangle[:,", "meshes, (center, ratio) def to(tensors, device: D) -> Union[T_Mesh, TS, T]: out =", "dtype=torch.float64) triangles = triangles.to(device, dtype=torch.float64) areas, _ = compute_face_areas(triangles) recover = triangles.clone() barycentric", "T: if type(mesh) is not T: triangles: T = mesh[0][mesh[1]] else: triangles: T", "- min_val) / scale for val in values] if len(values) == 1: return", "dtype=torch.int64, device=mask.device) - 1 mapper[mask] = torch.arange(mask.sum().item(), device=mask.device) return mapper def mesh_center(mesh: T_Mesh):", "torch.rand(*shape, device=device), torch.rand(*shape, device=device) mask = (u + v).gt(1) u[mask], v[mask] = -u[mask]", "for r, dtype in zip(result, dtypes)] return decoder @igl_prepare(torch.float32, torch.int64) def decimate_igl(mesh, num_faces:", "= mesh uvw = sample_uvw([faces.shape[0], num_samples], vs.device) samples = torch.einsum('fad,fna->fnd', vs[faces], uvw) return", "T_Mesh, center: 
T, scale: float, in_place: bool = True) -> T_Mesh: vs, faces", "type(param) is T else param for param in tensors] return params def create_mapper(mask:", "= points.numpy() vs, faces = mesh[0].numpy(), mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers", "return mesh vs, faces, _ = igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32)", "barycentric.sum(1).max().item() <= 1 + EPSILON return barycentric.to(device, dtype=dtype) def from_barycentric(mesh: Union[T_Mesh, T], face_ids:", "None, :] - triangle d_f = triangle[:, select] - triangle[:, (select + 1)", "torch.int64) def decimate_igl(mesh, num_faces: int): if mesh[1].shape[0] <= num_faces: return mesh vs, faces,", "T], face_ids: T, weights: T) -> T: if type(mesh) is not T: triangles:", "def gaussian_curvature(mesh: T_Mesh) -> T: gc = igl.gaussian_curvature(*mesh) return gc @igl_prepare(torch.float32) def per_vertex_normals_igl(mesh:", "in meshes: vs, _ = mesh vs -= center[None, :].to(vs.device) vs /= ratio", "torch.einsum('sf,sfd->sd', uvw, vs[chosen_faces]) return samples, chosen_faces_inds, uvw def get_samples(mesh: T_Mesh, num_samples: int, sample_s:", "2, 3], [4, 5, 6], [5, 7, 6], [0, 1, 5], [0, 5,", "T_Mesh]: out = [] for t in tensors: if type(t) is T: out.append(t.clone())", "min_vals) / 2 vs -= center[None, :] return vs def to_unit_sphere(mesh: T_Mesh, in_place:", "int, select: T) -> bool: vs, _ = mesh all_vecs = vs[select] -", "weights: T) -> T: vs = mesh[0][mesh[1][faces_inds]] vs = vs * weights[:, :,", "-> T: normals = igl.per_vertex_normals(*mesh, weighting) return normals @igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh: T_Mesh,", "_ = igl.remove_duplicates(*mesh, 1e-8) return igl.decimate(vs, faces, num_faces)[1:3] @igl_prepare(torch.float32) def gaussian_curvature(mesh: T_Mesh) ->", "= fe_iner.squeeze_(1) return fe_iner def sample_on_faces(mesh: T_Mesh, num_samples: 
int) -> TS: vs, faces", "TN) -> T: # to_squeeze = if fe.dim() == 1: fe = fe.unsqueeze(1)", "type(tensor) is tuple or type(tensors) is List: out.append(to(list(tensor), device)) else: out.append(tensor) if len(tensors)", "faces = mesh if edges_ind is None: edges_ind = get_edges_ind(mesh) edges = vs[edges_ind]", "params = [param.detach().cpu().numpy() if type(param) is T else param for param in tensors]", "out def get_box(w: float, h: float, d: float) -> T_Mesh: vs = [[0,", "sample_s == SampleBy.AREAS or sample_s == SampleBy.HYB: if face_areas is None: face_areas, _", "-> TS: out = igl.principal_curvature(*mesh) min_dir, max_dir, min_val, max_val = out return min_dir,", "+ 1) % 3] all_cross = torch.cross(d_vs, d_f, dim=2) all_dots = torch.einsum('nd,nad->na', normals,", "all_cos = torch.einsum('nd,nd->n', all_vecs[1:], all_vecs[:-1]) all_angles = torch.acos_(all_cos) all_angles = all_angles.sum() return (all_angles", "def get_samples(mesh: T_Mesh, num_samples: int, sample_s: SampleBy, *features: T) -> Union[T, TS]: samples,", "ind] return alpha device, dtype = vs.device, vs.dtype vs = vs.to(device, dtype=torch.float64) triangles", "[0, d, h], [w, d, h]] faces = [[0, 2, 1], [1, 2,", "import igl def scale_all(*values: T): max_val = max([val.max().item() for val in values]) min_val", "dim=1) # in place def to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T, float]]:", "dtype = vs.device, vs.dtype vs = vs.to(device, dtype=torch.float64) triangles = triangles.to(device, dtype=torch.float64) areas,", "if type(mesh) is not T: vs, faces = mesh vs_faces = vs[faces] else:", "is None: # sample from pc uvw = None if vs.shape[0] < num_samples:", "2 * np.pi).abs() < EPSILON def vs_over_triangle(vs_mid: T, triangle: T, normals=None) -> T:", "for mesh in meshes: vs, _ = mesh vs -= center[None, :].to(vs.device) vs", "query: T) -> T: query = query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh, query) @igl_prepare(torch.float32, 
torch.float32, torch.float32,", "True) -> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T, float]]: remove_me = 0 meshes = [(mesh,", "= torch.stack([u, v, w], dim=len(shape)) return uvw def get_sampled_fe(fe: T, mesh: T_Mesh, face_ids:", "% 3] all_cross = torch.cross(d_vs, d_f, dim=2) all_dots = torch.einsum('nd,nad->na', normals, all_cross) is_over", "face_areas_[torch.eq(face_areas_, 0)] = 1 face_normals = face_normals / face_areas_[:, None] face_areas = 0.5", "if edges_ind is None: edges_ind = get_edges_ind(mesh) edges = vs[edges_ind] return torch.norm(edges[:, 0]", "float) -> T_Mesh: vs = [[0, 0, 0], [w, 0, 0], [0, d,", "[torch.from_numpy(r).to(device, dtype=dtype) for r, dtype in zip(result, dtypes)] return decoder @igl_prepare(torch.float32, torch.int64) def", "def interpulate_vs(mesh: T_Mesh, faces_inds: T, weights: T) -> T: vs = mesh[0][mesh[1][faces_inds]] vs", "= torch.cat([faces[:, [i, (i + 1) % 3]] for i in range(3)]).sort() raw_edges", "@igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh, query: T) -> T: query = query.cpu().numpy() return igl.fast_winding_number_for_meshes(*mesh,", "faces), *args[1:], **kwargs) return to_torch(result, device) to_torch = to_torch_singe if len(dtypes) == 1", "face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas / face_areas.sum()) if sample_s == SampleBy.FACES or sample_s ==", "# in place def to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T, float]]: ref", "samples, uvw class SampleBy(Enum): AREAS = 0 FACES = 1 HYB = 2", "num_samples, sample_s=sample_s) if len(features) > 0: samples = [samples] + [get_sampled_fe(fe, mesh, face_ids,", "2, :] - vs_faces[:, 1, :]) return face_normals def compute_face_areas(mesh: Union[T_Mesh, T]) ->", "[0, 4, 2], [2, 4, 6]] return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def normalize(t:", "_ = mesh vs -= center[None, :].to(vs.device) vs /= ratio if len(meshes) ==", "lscm(mesh: T_Mesh, 
boundary_indices: T, boundary_coordinates: T) -> T: boundary_indices, boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy()", "device=mask.device) return mapper def mesh_center(mesh: T_Mesh): return mesh[0].mean(0) def to_center(vs): max_vals = vs.max(0)[0]", "igl.gaussian_curvature(*mesh) return gc @igl_prepare(torch.float32) def per_vertex_normals_igl(mesh: T_Mesh, weighting: int = 0) -> T:", "to_unit_edge(*meshes: T_Mesh) -> Tuple[Union[T_Mesh, Tuple[T_Mesh, ...]], Tuple[T, float]]: ref = meshes[0] center =", "remove_me = 0 meshes = [(mesh, remove_me) if type(mesh) is T else mesh", "Tuple[T_Mesh_T, ...]], Tuple[T, float]]: remove_me = 0 meshes = [(mesh, remove_me) if type(mesh)", "= scale_from_ref(mesh, center, scale) meshes_.append(vs_ if faces_ is remove_me else (vs_, faces_)) if", "def decimate_igl(mesh, num_faces: int): if mesh[1].shape[0] <= num_faces: return mesh vs, faces, _", "mesh vs_faces = vs[faces] else: vs_faces = mesh if vs_faces.shape[-1] == 2: vs_faces", "T]: out = [] for tensor in tensors: if type(tensor) is T: out.append(tensor.to(device,", "to_torch_singe(result, device): return torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result, device): return [torch.from_numpy(r).to(device, dtype=dtype) for r,", "T_Mesh, face_ids: T, uvw: TN) -> T: # to_squeeze = if fe.dim() ==", "[] scale = float(scale / max_range) for mesh in meshes: vs_, faces_ =", "= vs[select] - vs[center_ind][None, :] all_vecs = all_vecs / all_vecs.norm(2, 1)[:, None] all_vecs", "= None) -> T: vs, faces = mesh if edges_ind is None: edges_ind", "T_Mesh) -> T: device = points.device points = points.numpy() vs, faces = mesh[0].numpy(),", "- triangle d_f = triangle[:, select] - triangle[:, (select + 1) % 3]", "points) winding_numbers = torch.from_numpy(winding_numbers) inside_outside = winding_numbers.lt(.5).float() * 2 - 1 return inside_outside.to(device)", "torch.stack(barycentric, dim=1) # assert barycentric.sum(1).max().item() <= 1 + 
EPSILON return barycentric.to(device, dtype=dtype) def", "% 3]] for i in range(3)]).sort() raw_edges = raw_edges[0].cpu().numpy() edges = {(int(edge[0]), int(edge[1]))", "edges = {(int(edge[0]), int(edge[1])) for edge in raw_edges} edges = torch.tensor(list(edges), dtype=torch.int64, device=faces.device)", "-> Tuple[Union[T_Mesh_T, Tuple[T_Mesh_T, ...]], Tuple[T, float]]: remove_me = 0 meshes = [(mesh, remove_me)", "device, dtype = mesh[0].device, mesh[0].dtype vs, faces = to_numpy(*mesh) result = func((vs, faces),", "-> TNS: vs, faces = mesh if faces is None: # sample from", "= ref[0].mean(0) ratio = edge_lengths(ref).mean().item() for mesh in meshes: vs, _ = mesh", "get_sampled_fe(fe: T, mesh: T_Mesh, face_ids: T, uvw: TN) -> T: # to_squeeze =", "sample_uvw([faces.shape[0], num_samples], vs.device) samples = torch.einsum('fad,fna->fnd', vs[faces], uvw) return samples, uvw class SampleBy(Enum):", "SampleBy.HYB: if face_areas is None: face_areas, _ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] = 0 weighted_p.append(face_areas", "faces_ is remove_me else (vs_, faces_)) if len(meshes_) == 1: meshes_ = meshes_[0]", "= triangles.to(device, dtype=torch.float64) areas, _ = compute_face_areas(triangles) recover = triangles.clone() barycentric = [compute_barycentric(i)", "bool: for mesh in meshes: face_normals = get_faces_normals(mesh) if not face_normals[:, 2].gt(0).all(): return", "-> T_Mesh: vs, faces = mesh if not in_place: vs = vs.clone() vs", "- 1 return inside_outside.to(device) @igl_prepare(torch.float32) def lscm(mesh: T_Mesh, boundary_indices: T, boundary_coordinates: T) ->", "out = [] for tensor in tensors: if type(tensor) is T: out.append(tensor.to(device, ))", "0], [w, d, 0], [0, 0, h], [w, 0, h], [0, d, h],", "vs, faces = mesh[0].numpy(), mesh[1].numpy() winding_numbers = igl.fast_winding_number_for_meshes(vs, faces, points) winding_numbers = torch.from_numpy(winding_numbers)", "T_Mesh, faces_inds: T, weights: T) -> T: vs = 
mesh[0][mesh[1][faces_inds]] vs = vs", "return vs, faces @igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh, query: T) -> T: query =", "_, normals = compute_face_areas(triangle) select = torch.arange(3) d_vs = vs_mid[:, None, :] -", "6]] return torch.tensor(vs, dtype=torch.float32), torch.tensor(faces, dtype=torch.int64) def normalize(t: T): t = t /", "compute_face_areas(triangles) recover = triangles.clone() barycentric = [compute_barycentric(i) for i in range(3)] barycentric =", "triangles[face_ids], weights) if to_squeeze: vs = vs.squeeze(0) return vs def check_circle_angles(mesh: T_Mesh, center_ind:", "T_Mesh: vs, faces = mesh if not in_place: vs = vs.clone() vs =", "fe_iner.squeeze_(1) return fe_iner def sample_on_faces(mesh: T_Mesh, num_samples: int) -> TS: vs, faces =", "+ min_vals) / 2 meshes_ = [] scale = float(scale / max_range) for", "def to_torch_singe(result, device): return torch.from_numpy(result).to(device, dtype=dtypes[0]) def to_torch_multi(result, device): return [torch.from_numpy(r).to(device, dtype=dtype) for", "torch.einsum('sad,sa->sd', fe_unrolled, uvw) # if to_squeeze: # fe_iner = fe_iner.squeeze_(1) return fe_iner def", "-> TS: vs, faces = mesh uvw = sample_uvw([faces.shape[0], num_samples], vs.device) samples =", "meshes[0] return meshes, (center, ratio) def to(tensors, device: D) -> Union[T_Mesh, TS, T]:", "sample_s=sample_s) if len(features) > 0: samples = [samples] + [get_sampled_fe(fe, mesh, face_ids, uvw)", "weights[:, :, None] return vs.sum(1) def sample_uvw(shape, device: D): u, v = torch.rand(*shape,", "num_samples: chosen_faces_inds = torch.arange(vs.shape[0]) else: chosen_faces_inds = torch.argsort(torch.rand(vs.shape[0]))[:num_samples] samples = vs[chosen_faces_inds] else: weighted_p", ":] return vs def to_unit_sphere(mesh: T_Mesh, in_place: bool = True, scale=1.) 
-> T_Mesh:", "all_vecs[:-1]) all_angles = torch.acos_(all_cos) all_angles = all_angles.sum() return (all_angles - 2 * np.pi).abs()", "= if fe.dim() == 1: fe = fe.unsqueeze(1) if uvw is None: fe_iner", "[[0, 2, 1], [1, 2, 3], [4, 5, 6], [5, 7, 6], [0,", "h], [w, 0, h], [0, d, h], [w, d, h]] faces = [[0,", "dtype=torch.int64, device=faces.device) return edges def edge_lengths(mesh: T_Mesh, edges_ind: TN = None) -> T:", "d, 0], [0, 0, h], [w, 0, h], [0, d, h], [w, d,", "def mesh_center(mesh: T_Mesh): return mesh[0].mean(0) def to_center(vs): max_vals = vs.max(0)[0] min_vals = vs.min(0)[0]", "@igl_prepare(torch.float32, torch.int64) def remove_duplicate_vertices(mesh: T_Mesh, epsilon=1e-7) -> T_Mesh: vs, _, _, faces =", "= torch.cross(vs_faces[:, 1, :] - vs_faces[:, 0, :], vs_faces[:, 2, :] - vs_faces[:,", "if len(tensors) == 1: return out[0] else: return tuple(out) def clone(*tensors: Union[T, TS])", "igl.remove_duplicate_vertices(*mesh, epsilon) return vs, faces @igl_prepare(torch.float32) def winding_number_igl(mesh: T_Mesh, query: T) -> T:", "w = -u - v + 1 uvw = torch.stack([u, v, w], dim=len(shape))", "faces = mesh if faces is None: # sample from pc uvw =", "triangle: T, normals=None) -> T: if vs_mid.dim() == 1: vs_mid = vs_mid.unsqueeze(0) triangle", "== 1: meshes = meshes[0] return meshes, (center, ratio) def to(tensors, device: D)", "-> T: boundary_indices, boundary_coordinates = boundary_indices.numpy(), boundary_coordinates.numpy() check, uv = igl.lscm(*mesh, boundary_indices, boundary_coordinates)", "- triangle[:, (select + 1) % 3] all_cross = torch.cross(d_vs, d_f, dim=2) all_dots", "TS]: samples, face_ids, uvw = sample_on_mesh(mesh, num_samples, sample_s=sample_s) if len(features) > 0: samples", "vs, _ = mesh vs -= center[None, :].to(vs.device) vs /= ratio if len(meshes)", "sample_s == SampleBy.HYB: if face_areas is None: face_areas, _ = compute_face_areas(mesh) face_areas[torch.isnan(face_areas)] =", "clone(*tensors: Union[T, TS]) -> Union[TS, T_Mesh]: 
out = [] for t in tensors:" ]
[ "views as filebrowser_views urlpatterns = [ url(r'^$', filebrowser_views.index, name='filebrowser'), url(r'^api/$', filebrowser_views.api_single_pandaid, name='filebrowser-api-single-pandaid'), url(r'^delete/$',", "static from django.contrib import admin ### #FIXME admin.autodiscover() import views as filebrowser_views urlpatterns", "from django.conf.urls import include, url from django.conf import settings from django.conf.urls.static import static", "import admin ### #FIXME admin.autodiscover() import views as filebrowser_views urlpatterns = [ url(r'^$',", "from django.conf import settings from django.conf.urls.static import static from django.contrib import admin ###", "<gh_stars>0 \"\"\" filebrowser.urls \"\"\" from django.conf.urls import include, url from django.conf import settings", "\"\"\" from django.conf.urls import include, url from django.conf import settings from django.conf.urls.static import", "\"\"\" filebrowser.urls \"\"\" from django.conf.urls import include, url from django.conf import settings from", "django.contrib import admin ### #FIXME admin.autodiscover() import views as filebrowser_views urlpatterns = [", "import views as filebrowser_views urlpatterns = [ url(r'^$', filebrowser_views.index, name='filebrowser'), url(r'^api/$', filebrowser_views.api_single_pandaid, name='filebrowser-api-single-pandaid'),", "filebrowser.urls \"\"\" from django.conf.urls import include, url from django.conf import settings from django.conf.urls.static", "urlpatterns = [ url(r'^$', filebrowser_views.index, name='filebrowser'), url(r'^api/$', filebrowser_views.api_single_pandaid, name='filebrowser-api-single-pandaid'), url(r'^delete/$', filebrowser_views.delete_files, name='filebrowser-delete'), ]", "url from django.conf import settings from django.conf.urls.static import static from django.contrib import admin", "django.conf.urls.static import static from django.contrib import admin ### #FIXME admin.autodiscover() import views as", "filebrowser_views 
urlpatterns = [ url(r'^$', filebrowser_views.index, name='filebrowser'), url(r'^api/$', filebrowser_views.api_single_pandaid, name='filebrowser-api-single-pandaid'), url(r'^delete/$', filebrowser_views.delete_files, name='filebrowser-delete'),", "admin ### #FIXME admin.autodiscover() import views as filebrowser_views urlpatterns = [ url(r'^$', filebrowser_views.index,", "import settings from django.conf.urls.static import static from django.contrib import admin ### #FIXME admin.autodiscover()", "django.conf.urls import include, url from django.conf import settings from django.conf.urls.static import static from", "django.conf import settings from django.conf.urls.static import static from django.contrib import admin ### #FIXME", "import static from django.contrib import admin ### #FIXME admin.autodiscover() import views as filebrowser_views", "from django.conf.urls.static import static from django.contrib import admin ### #FIXME admin.autodiscover() import views", "### #FIXME admin.autodiscover() import views as filebrowser_views urlpatterns = [ url(r'^$', filebrowser_views.index, name='filebrowser'),", "settings from django.conf.urls.static import static from django.contrib import admin ### #FIXME admin.autodiscover() import", "#FIXME admin.autodiscover() import views as filebrowser_views urlpatterns = [ url(r'^$', filebrowser_views.index, name='filebrowser'), url(r'^api/$',", "admin.autodiscover() import views as filebrowser_views urlpatterns = [ url(r'^$', filebrowser_views.index, name='filebrowser'), url(r'^api/$', filebrowser_views.api_single_pandaid,", "import include, url from django.conf import settings from django.conf.urls.static import static from django.contrib", "include, url from django.conf import settings from django.conf.urls.static import static from django.contrib import", "from django.contrib import admin ### #FIXME admin.autodiscover() import views as filebrowser_views urlpatterns =", "as filebrowser_views urlpatterns = [ url(r'^$', 
filebrowser_views.index, name='filebrowser'), url(r'^api/$', filebrowser_views.api_single_pandaid, name='filebrowser-api-single-pandaid'), url(r'^delete/$', filebrowser_views.delete_files," ]
[ "sampling (True) or minibatch weighted sampling (False) kwargs: Additional arguments for `BaseLoss`, e.g.", "loss. Bernoulli corresponds to a binary cross entropy (bse), Gaussian corresponds to MSE,", "Shape : (batch_size, n_chan, height, width). latent_dist : tuple of torch.tensor sufficient statistics", "weighted sampling (False) kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1]", "with the following commented out code after viz is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if", "Advances in Neural Information Processing Systems. 2018. \"\"\" batch_size = latent_dist.size(0) _logqz =", "_reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist) # TODO: remove this kl_loss term once viz is", "gamma self.C_init = C_init self.C_fin = C_fin self.C_n_interp = C_n_interp def __call__(self, data,", "\"\"\" Compute the decomposed KL loss with either minibatch weighted sampling or minibatch", "Gaussian distribution corresponds to MSE, and is sometimes used, but hard to train", "**kwargs_all) elif name == \"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name", "in which to store important variables for vizualisation. Returns ------- loss : torch.Tensor", "loss = F.mse_loss(recon_data * 255, data * 255, reduction=\"sum\") / 255 elif distribution", "= torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if not", "celeba: 15 self.device = device self.dataset_size = data_size self.beta = beta self.alpha =", "rec_dist`. References ---------- [1] Chen, <NAME>, et al. 
\"Isolating sources of disentanglement in", "= torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) + _logqz, dim=1, keepdim=False).sum(1) return logqz, logqz_prodmarginals def _reconstruction_loss(data,", "\"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name == \"VAE\": return BetaHLoss(beta=1, **kwargs_all) elif name", "loss = rec_loss + anneal_rec * (self.alpha * mi_loss + self.beta * tc_loss", "beta has to be increased by one for correct comparaison # # as", "and (0.4,0.5), which might not be optimal. Gaussian distribution corresponds to MSE, and", "is_train, storer): storer = self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss", "trick shape : (batch_size, latent_dim). References ---------- [1] <NAME>, and <NAME>. \"Disentangling by", "is_train or self.n_train_steps % self.record_loss_every == 1: storer = storer else: storer =", ": (batch_size, latent_dim). References ---------- [1] <NAME>, and <NAME>. \"Disentangling by factorising.\" arXiv", "given the argparse arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name == \"betaH\": return", "log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1) # calculate log p(z) prior_params = torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device)", "marginals of q(z_j)) with minibatch weighted sampling. Parameters ---------- latent_dist : torch.Tensor Mean", "is_mss : bool Selects either minibatch stratified sampling (True) or minibatch weighted sampling", "as per Algorithm 2 of [1] Parameters ---------- device : torch.device beta :", "= None return storer class BetaHLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in", "# loss in [0,255] space but normalized by 255 to not be too", "training iterations for interpolating C. 
gamma : float, optional Weight of the KL", "It has the issue that it doesn't penalize the same way (0.1,0.2) and", "return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif name == \"batchTC\":", "+ anneal_rec * (self.beta * kl_loss) if storer is not None: storer['loss'].append(loss.item()) return", "C_init=0., C_fin=5., C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs) self.gamma = gamma self.C_init = C_init self.C_fin", "elif distribution == \"gaussian\": # loss in [0,255] space but normalized by 255", "== 1: storer = storer else: storer = None return storer class BetaHLoss(BaseLoss):", "z in range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device) perm[:, z] = latent_sample[pi, z] return perm", "[0,255] space but normalized by 255 to not be too big but #", "(logqz_condx - logqz).mean() tc_loss = (logqz - logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals - logpz).mean()", "name == \"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif", "pixel. Implicitely defines the reconstruction loss. Bernoulli corresponds to a binary cross entropy", "+ self.beta * tc_loss + self.gamma * dw_kl_loss) if storer is not None:", "diagonal covariance and a unit normal distribution. Parameters ---------- mean : torch.Tensor Mean", "batch_size, return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1, keepdim=False) - math.log(batch_size * data_size)).sum(dim=1) logqz =", "most commonly used. 
It has the issue that it doesn't penalize the same", "\"\"\" def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps = 0 self.record_loss_every = record_loss_every self.rec_dist", "Parameters ---------- latent_dist : torch.Tensor Mean and logvar of the normal distribution. Shape", "Shape : (batch_size, n_chan, height, width). distribution : {\"bernoulli\", \"gaussian\", \"laplace\"} Distribution of", "e.g. rec_dist`. References ---------- [1] Kim, Hyunjik, and <NAME>. \"Disentangling by factorising.\" arXiv", "`BaseLoss`, e.g. rec_dist`. References ---------- [1] Kim, Hyunjik, and <NAME>. \"Disentangling by factorising.\"", "by 255 to not be too big but # multiply by 255 and", "storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) C = (linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp) if", "self.n_train_steps % self.record_loss_every == 1: storer = storer else: storer = None return", "half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling _, logqz_prodmarginals =", "Laplace corresponds to L1. steps_anneal: nool, optional Number of annealing steps where gradually", "if not self.is_mss: # minibatch weighted sampling logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size)", "tc_loss = (F.logsigmoid(d_z) - F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal)", "be optimal. 
Gaussian distribution corresponds to MSE, and is sometimes used, but hard", "optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step() if storer is not None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class", "logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the log (product", "of marginals of q(z_j)) with minibatch stratified sampling. Parameters ---------- latent_dist : torch.Tensor", "if storer is not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove this", "variational autoencoders.\" Advances in Neural Information Processing Systems. 2018. \"\"\" batch_size = latent_dist.size(0)", "storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) anneal_rec =", "normalized by 255 to not be too big loss = F.mse_loss(recon_data * 255,", "is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs) self.gamma = gamma", "# Get second sample of latent distribution latent_sample2 = model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach()", "bool Selects either minibatch stratified sampling (True) or minibatch weighted sampling (False) kwargs:", "torch import optim from .discriminator import Discriminator from disvae.utils.math import log_density_normal, log_importance_weight_matrix #", "of the normal distribution. 
Shape (batch_size, latent_dim, 2) latent_sample: torch.Tensor sample from the", "data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\ - math.log(batch_size * data_size) return logqz,", "storer = self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist,", "# Run VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator Loss # Get second", "a unit normal distribution. Parameters ---------- mean : torch.Tensor Mean of the normal", "batch_size, return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1, keepdim=False)", "logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals - logpz).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if", "as not doing anything for L1 loss = F.l1_loss(recon_data, data, reduction=\"sum\") else: raise", "/ batch_size if storer is not None: storer['recon_loss'].append(loss.item()) return loss def _kl_normal_loss(mean, logvar,", "TC loss term. `gamma` in the paper. is_mutual_info : bool True : includes", "and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). 
\"\"\" perm = torch.zeros_like(latent_sample)", "Randomly permutes the sample from q(z) (latent_dist) across the batch for each of", "is_train else 1) # total loss loss = rec_loss + anneal_rec * (self.alpha", "gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs) # beta values: dsprites: 6, celeba: 15 self.device =", "device, data_size, alpha=1., beta=6., gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs) # beta values: dsprites: 6,", "if is_train else 1) loss = rec_loss + anneal_rec * (self.beta * kl_loss)", "if is_train else 1) # total loss loss = rec_loss + anneal_rec *", "viz is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist, storer) d_z = self.discriminator(latent_sample1) #", "kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else: raise ValueError(\"Uknown loss : {}\".format(name))", "= gamma self.is_mss = is_mss # minibatch stratified sampling def __call__(self, data, recon_batch,", "log (product of marginals of q(z_j)) with minibatch stratified sampling. Parameters ---------- latent_dist", "fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: # return vae loss vae_loss = rec_loss +", "def __call__(self, data, recon_batch, latent_dist, is_train, storer, latent_sample=None): storer = self._pre_call(is_train, storer) batch_size", "self.C_fin = C_fin self.C_n_interp = C_n_interp def __call__(self, data, recon_data, latent_dist, is_train, storer):", "for L1 loss = F.l1_loss(recon_data, data, reduction=\"sum\") else: raise ValueError(\"Unkown distribution: {}\".format(distribution)) loss", "to L1. steps_anneal: nool, optional Number of annealing steps where gradually adding the", "Calculates the per image reconstruction loss for a batch of data. 
Parameters ----------", "str(i)].append(tc_loss_vec[i].item()) return loss def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the", "term # change latent dist to torch.tensor (could probably avoid this) latent_dist =", "Beta-VAE loss as in [1] Parameters ---------- C_init : float, optional Starting annealed", "data_size self.beta = beta self.alpha = alpha self.gamma = gamma self.is_mss = is_mss", "et al. \"beta-vae: Learning basic visual concepts with a constrained variational framework.\" (2016).", "log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling logqz,", "corresponds to MSE, and is sometimes used, but hard to train ecause it", "elif name == \"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name ==", "Estimates log q(z) and the log (product of marginals of q(z_j)) with minibatch", "latent_dist, is_train, storer): storer = self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist)", "= 0 self.record_loss_every = record_loss_every self.rec_dist = rec_dist self.steps_anneal = steps_anneal @abc.abstractmethod def", "or minibatch stratified sampling according to [1] Parameters ---------- data_size: int Size of", "calculate log p(z) prior_params = torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample1, prior_params, half_batch_size,", "{\"bernoulli\", \"gaussian\", \"laplace\"}, optional Reconstruction distribution istribution of the likelihood on the each", "\"laplace\"}, optional Reconstruction distribution istribution of the likelihood on the each pixel. 
Implicitely", "return vae_loss class BatchTCLoss(BaseLoss): \"\"\" Compute the decomposed KL loss with either minibatch", "loss = rec_loss + anneal_rec * (self.beta * kl_loss) if storer is not", "* (F.logsigmoid(d_z) + F.logsigmoid(1 - d_z_perm))).mean() # Run discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step()", "Shape (batch_size, latent_dim) where D is dimension of distribution. logvar : torch.Tensor Diagonal", "half_batch_size = batch_size // 2 data = data.split(half_batch_size) data1 = data[0] data2 =", "0: return fin assert fin > init delta = fin - init annealed", "15 self.device = device self.dataset_size = data_size self.beta = beta self.alpha = alpha", "(mean and log_var). Parameters ---------- latent_sample: torch.Tensor sample from the latent dimension using", "factor-vae split data into two batches. In the paper they sample 2 batches", "\"bernoulli\": loss = F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif distribution == \"gaussian\": # loss in", "prior_params = torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if", "annealing of a parameter.\"\"\" if annealing_steps == 0: return fin assert fin >", "(-1 - logvar + mean.pow(2) + logvar.exp()).mean(dim=0) total_kl = latent_kl.sum() if storer is", "of the latent dimensions (mean and log_var). Parameters ---------- latent_sample: torch.Tensor sample from", "1 # kl_loss = _dimwise_kl_loss(*latent_dist, storer) # # vae_loss = rec_loss + kl_loss", "latent_sample, data_size): \"\"\" Estimates log q(z) and the log (product of marginals of", "References ---------- [1] Chen, <NAME>, et al. \"Isolating sources of disentanglement in variational", "latent_dist : torch.Tensor Mean and logvar of the normal distribution. 
Shape (batch_size, latent_dim,", "class BatchTCLoss(BaseLoss): \"\"\" Compute the decomposed KL loss with either minibatch weighted sampling", "- logvar + mean.pow(2) + logvar.exp()).mean(dim=0) total_kl = latent_kl.sum() if storer is not", "Discriminator from disvae.utils.math import log_density_normal, log_importance_weight_matrix # TO-DO: clean data_size and device def", "where gradually adding the regularisation. \"\"\" def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps =", "the correct loss function given the argparse arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if", "for vizualisation. Returns ------- loss : torch.Tensor Per image cross entropy (i.e. normalized", "a few pixels that are very wrong. Laplace distribution corresponds to L1 solves", "self.gamma + 1 dw_kl_loss = (logqz_prodmarginals - logpz).mean() vae_loss = rec_loss + anneal_rec", "of the kl divergence. References: [1] Higgins, Irina, et al. \"beta-vae: Learning basic", "return loss def _kl_normal_loss(mean, logvar, storer=None): \"\"\" Calculates the KL divergence between a", "# beta values: dsprites: 6, celeba: 15 self.device = device self.dataset_size = data_size", "q(z_j)) with minibatch weighted sampling. Parameters ---------- latent_dist : torch.Tensor Mean and logvar", "change latent dist to torch.tensor (could probably avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]),", "* (self.beta * kl_loss) if storer is not None: storer['loss'].append(loss.item()) return loss class", "Dictionary in which to store important variables for vizualisation. \"\"\" latent_dim = mean.size(1)", "rec_dist self.steps_anneal = steps_anneal @abc.abstractmethod def __call__(self, data, recon_data, latent_dist, is_train, storer): \"\"\"", "[1] Chen, <NAME>, et al. 
\"Isolating sources of disentanglement in variational autoencoders.\" Advances", "$\\beta$-VAE.\" arXiv preprint arXiv:1804.03599 (2018). \"\"\" def __init__(self, C_init=0., C_fin=5., C_n_interp=25000, gamma=30., **kwargs):", "gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs) self.gamma =", "C_init self.C_fin = C_fin self.C_n_interp = C_n_interp def __call__(self, data, recon_data, latent_dist, is_train,", "covariance and a unit normal distribution. Parameters ---------- mean : torch.Tensor Mean of", "math.log(batch_size * data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\ - math.log(batch_size * data_size)", "of the total correlation term. gamma : float Weight of the dimension-wise KL", "if storer is not None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class BatchTCLoss(BaseLoss): \"\"\" Compute the", "= (torch.logsumexp(_logqz, dim=1, keepdim=False) - math.log(batch_size * data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False)", "import math import torch from torch.nn import functional as F from torch import", "\"\"\" def __init__(self, beta=4, **kwargs): super().__init__(**kwargs) self.beta = beta def __call__(self, data, recon_data,", "the TC loss term. `gamma` in the paper. is_mutual_info : bool True :", "torch.randperm(batch_size).to(latent_sample.device) perm[:, z] = latent_sample[pi, z] return perm def linear_annealing(init, fin, step, annealing_steps):", "(logqz_prodmarginals - logpz).mean() vae_loss = rec_loss + anneal_rec * (gamma * tc_loss +", "q(z) and the log (product of marginals of q(z_j)) with minibatch stratified sampling.", "the training set References : [1] Chen, <NAME>, et al. \"Isolating sources of", "entropy (bse) loss and is the most commonly used. It has the issue", "C. 
gamma : float, optional Weight of the KL divergence term. kwargs: Additional", "Parameters ---------- C_init : float, optional Starting annealed capacity C. C_fin : float,", "batch_size = data.size(0) # change latent dist to torch.tensor (could probably avoid this)", ": (batch_size, n_chan, height, width). recon_data : torch.Tensor Reconstructed data. Shape : (batch_size,", "from the latent dimension using the reparameterisation trick shape : (batch_size, latent_dim). data_size", "Systems. 2018. \"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logqz_prodmarginals", "latent_sample2 = model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm) # Calculate total correlation", "C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name == \"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"],", "disentanglement in variational autoencoders.\" Advances in Neural Information Processing Systems. 2018. \"\"\" def", "code with the following commented out code after viz is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863", "storer) d_z = self.discriminator(latent_sample1) # clamping to 0 because TC cannot be negative", "_minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else: # minibatch stratified sampling _, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1,", "d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if model.training else 1) # TODO", "# calculate log p(z) prior_params = torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample1, prior_params,", "width). 
recon_data : torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height, width). latent_dist", "logvar of the normal distribution. Shape (batch_size, latent_dim, 2) latent_sample: torch.Tensor sample from", "# minibatch weighted sampling logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else: # minibatch", "perm def linear_annealing(init, fin, step, annealing_steps): \"\"\"Linear annealing of a parameter.\"\"\" if annealing_steps", "\"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name == \"factor\": return FactorKLoss(kwargs_parse[\"device\"],", "recon_data : torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height, width). distribution :", "they sample 2 batches batch_size = data.size(dim=0) half_batch_size = batch_size // 2 data", "constrained variational framework.\" (2016). kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. \"\"\" def", "# clamping to 0 because TC cannot be negative : TEST tc_loss =", "storer=None): \"\"\" Calculates the KL divergence between a normal distribution with diagonal covariance", "of disentanglement in variational autoencoders.\" Advances in Neural Information Processing Systems. 2018. \"\"\"", "= fin - init annealed = min(init + delta * step / annealing_steps,", "mean of kl for each latent dimension latent_kl = 0.5 * (-1 -", "= _dimwise_kl_loss(*latent_dist, storer) # # vae_loss = rec_loss + kl_loss + beta *", "loss as in [1] Parameters ---------- beta : float, optional Weight of the", "data[0] data2 = data[1] # Factor VAE Loss recon_batch, latent_dist, latent_sample1 = model(data1)", "0 self.record_loss_every = record_loss_every self.rec_dist = rec_dist self.steps_anneal = steps_anneal @abc.abstractmethod def __call__(self,", "in $\\beta$-VAE.\" arXiv preprint arXiv:1804.03599 (2018). 
\"\"\" def __init__(self, C_init=0., C_fin=5., C_n_interp=25000, gamma=30.,", "cross entropy (bse), Gaussian corresponds to MSE, Laplace corresponds to L1. steps_anneal: nool,", "from .discriminator import Discriminator from disvae.utils.math import log_density_normal, log_importance_weight_matrix # TO-DO: clean data_size", "(batch_size, latent_dim). storer : dict Dictionary in which to store important variables for", "(logqz_prodmarginals - logpz).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1)", "* tc_loss if storer is not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not model.training: #", "\"gaussian\", \"laplace\"}, optional Reconstruction distribution istribution of the likelihood on the each pixel.", "Processing Systems. 2018. \"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True)", "batch_size // 2 data = data.split(half_batch_size) data1 = data[0] data2 = data[1] #", "storer is not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not model.training: # don't backprop if", "+ str(i)].append(latent_kl[i].item()) return total_kl def _permute_dims(latent_sample): \"\"\" Implementation of Algorithm 1 in ref", "to store important variables for vizualisation. \"\"\" def _pre_call(self, is_train, storer): if is_train:", "self.gamma = gamma self.C_init = C_init self.C_fin = C_fin self.C_n_interp = C_n_interp def", "_log q(z) matrix logqz_condx = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1) # calculate log p(z)", "each pixel. Implicitely defines the reconstruction loss. 
Bernoulli corresponds to a binary cross", "data.size(0) # change latent dist to torch.tensor (could probably avoid this) latent_dist =", "distribution corresponds to MSE, and is sometimes used, but hard to train ecause", "logqz_prodmarginals) for i in range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return loss def _minibatch_weighted_sampling(latent_dist, latent_sample,", "<NAME>, et al. \"Isolating sources of disentanglement in variational autoencoders.\" Advances in Neural", "minibatch weighted sampling _, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else: # minibatch stratified", "latent dimension. E.g. for gaussian (mean, log_var) each of shape : (batch_size, latent_dim).", "if not self.is_mss: # minibatch weighted sampling _, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size)", "tc_loss if storer is not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not model.training: # don't", "loss = F.l1_loss(recon_data, data, reduction=\"sum\") else: raise ValueError(\"Unkown distribution: {}\".format(distribution)) loss = loss", "= C_n_interp def __call__(self, data, recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train, storer)", "data2 = data[1] # Factor VAE Loss recon_batch, latent_dist, latent_sample1 = model(data1) rec_loss", "latent dimensions (mean and log_var). Parameters ---------- latent_sample: torch.Tensor sample from the latent", "logpz = log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted", "Weight of the total correlation term. 
gamma : float Weight of the dimension-wise", "= latent_kl.sum() if storer is not None: storer['kl_loss'].append(total_kl.item()) for i in range(latent_dim): storer['kl_loss_'", "batch_size, 1) + _logqz, dim=1, keepdim=False).sum(1) return logqz, logqz_prodmarginals def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\",", "optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator Loss # Get second sample of latent", "(gamma * tc_loss + dw_kl_loss) # if self.is_mutual_info: # beta = self.beta #", "of the likelihood on the each pixel. Implicitely defines the loss Bernoulli corresponds", "is_train else 1) loss = rec_loss + anneal_rec * (self.beta * kl_loss) if", "by 255 and divide 255, is the same as not doing anything for", "Information Processing Systems. 2018. \"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size,", "very wrong. Laplace distribution corresponds to L1 solves partially the issue of MSE.", "pixel. Implicitely defines the loss Bernoulli corresponds to a binary cross entropy (bse)", "log q(z|x) and _log q(z) matrix logqz_condx = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1) #", "linear_annealing(init, fin, step, annealing_steps): \"\"\"Linear annealing of a parameter.\"\"\" if annealing_steps == 0:", "+ self.gamma * dw_kl_loss) if storer is not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item())", "logpz = log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted", "arXiv:1802.05983 (2018). \"\"\" def __init__(self, device, data_size, gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000),", "(2016). 
kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. \"\"\" def __init__(self, beta=4, **kwargs):", "float, optional Weight of the TC loss term. `gamma` in the paper. is_mutual_info", "and is the most commonly used. It has the issue that it doesn't", "\"beta-vae: Learning basic visual concepts with a constrained variational framework.\" (2016). kwargs: Additional", "reparameterisation trick shape : (batch_size, latent_dim). data_size : int Number of data in", "torch.Tensor Diagonal log variance of the normal distribution. Shape (batch_size, latent_dim) storer :", "adding the regularisation. \"\"\" def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps = 0 self.record_loss_every", "recon_batch, storer=storer, distribution=self.rec_dist) mi_loss = (logqz_condx - logqz).mean() tc_loss = (logqz - logqz_prodmarginals).mean()", "torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height, width). latent_dist : tuple of", "model.training: # don't backprop if evaluating return vae_loss # Run VAE optimizer optimizer.zero_grad()", "rec_loss + anneal_rec * (gamma * tc_loss + dw_kl_loss) # if self.is_mutual_info: #", "1 in ref [1]. Randomly permutes the sample from q(z) (latent_dist) across the", "batch of data. Parameters ---------- data : torch.Tensor Input data (e.g. batch of", "return logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the", "\"\"\" Base class for losses. Parameters ---------- record_loss_every: int, optional Every how many", "<filename>disvae/models/losses.py \"\"\" Module containing all vae losses. \"\"\" import abc import math import", "this code with the following commented out code after viz is fixed #", "vae losses. 
\"\"\" import abc import math import torch from torch.nn import functional", "self.gamma = gamma self.is_mss = is_mss # minibatch stratified sampling def __call__(self, data,", "that are very wrong. Laplace distribution corresponds to L1 solves partially the issue", "optimizer.step() # Discriminator Loss # Get second sample of latent distribution latent_sample2 =", "Irina, et al. \"beta-vae: Learning basic visual concepts with a constrained variational framework.\"", "`gamma` in the paper. is_mutual_info : bool True : includes the mutual information", "data.split(half_batch_size) data1 = data[0] data2 = data[1] # Factor VAE Loss recon_batch, latent_dist,", "height, width). recon_data : torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height, width).", "Loss recon_batch, latent_dist, latent_sample1 = model(data1) rec_loss = _reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist) #", "255, reduction=\"sum\") / 255 elif distribution == \"laplace\": # loss in [0,255] space", "return logqz, logqz_prodmarginals def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None): \"\"\" Calculates the per image", "batch of images). Shape : (batch_size, n_chan, height, width). recon_data : torch.Tensor Reconstructed", "istribution of the likelihood on the each pixel. Implicitely defines the reconstruction loss.", "kl_loss) if storer is not None: storer['loss'].append(loss.item()) return loss class BetaBLoss(BaseLoss): \"\"\" Compute", "and device def get_loss_f(name, kwargs_parse={}): \"\"\"Return the correct loss function given the argparse", "Number of data in the training set References : [1] Chen, <NAME>, et", "(e.g. batch of images). Shape : (batch_size, n_chan, height, width). recon_data : torch.Tensor", "latent_sample, self.dataset_size) # rec loss, mutual information, total correlation and dim-wise kl rec_loss", "to L1 solves partially the issue of MSE. 
storer : dict Dictionary in", "BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else: raise ValueError(\"Uknown loss :", "latent_dim) where D is dimension of distribution. logvar : torch.Tensor Diagonal log variance", "normalized per batch but not pixel and channel) \"\"\" batch_size, n_chan, height, width", "\"\"\" def __init__(self, C_init=0., C_fin=5., C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs) self.gamma = gamma self.C_init", "data (e.g. batch of images). Shape : (batch_size, n_chan, height, width). recon_data :", "2) latent_sample: torch.Tensor sample from the latent dimension using the reparameterisation trick shape", "minibatch stratified sampling according to [1] Parameters ---------- data_size: int Size of the", "dim-wise kl rec_loss = _reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist) mi_loss = (logqz_condx - logqz).mean()", "weighted sampling logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else: # minibatch stratified sampling", "In the paper they sample 2 batches batch_size = data.size(dim=0) half_batch_size = batch_size", "steps_anneal: nool, optional Number of annealing steps where gradually adding the regularisation. \"\"\"", "across the batch for each of the latent dimensions (mean and log_var). Parameters", "sampling or minibatch stratified sampling according to [1] Parameters ---------- data_size: int Size", "torch.nn import functional as F from torch import optim from .discriminator import Discriminator", "self.n_train_steps, self.steps_anneal) if is_train else 1) # total loss loss = rec_loss +", "= rec_loss + kl_loss + beta * tc_loss if storer is not None:", "Input data (e.g. batch of images). Shape : (batch_size, n_chan, height, width). 
recon_data", "minibatch weighted sampling logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else: # minibatch stratified", "batch_size = data.size(dim=0) half_batch_size = batch_size // 2 data = data.split(half_batch_size) data1 =", "E.g. for gaussian (mean, log_var) each of shape : (batch_size, latent_dim). storer :", "# # beta has to be increased by one for correct comparaison #", "stratified sampling _, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma = self.gamma + 1", ": bool Selects either minibatch stratified sampling (True) or minibatch weighted sampling (False)", "but not pixel and channel) \"\"\" batch_size, n_chan, height, width = recon_data.size() is_colored", "self.steps_anneal) if is_train else 1) loss = rec_loss + anneal_rec * (self.beta *", "perm[:, z] = latent_sample[pi, z] return perm def linear_annealing(init, fin, step, annealing_steps): \"\"\"Linear", "latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log q(z|x) and _log q(z) matrix", "_kl_normal_loss(*latent_dist, storer) anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) loss", "storer = self._pre_call(model.training, storer) # factor-vae split data into two batches. In the", "mi_loss = (logqz_condx - logqz).mean() tc_loss = (logqz - logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals", "vizualisation. 
\"\"\" latent_dim = mean.size(1) # batch mean of kl for each latent", "None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class BatchTCLoss(BaseLoss): \"\"\" Compute the decomposed KL loss with", "total loss loss = rec_loss + anneal_rec * (self.alpha * mi_loss + self.beta", "this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log p(z) prior_params = torch.zeros(half_batch_size,", "hard to train ecause it ends up focusing only a few pixels that", "function given the argparse arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name == \"betaH\":", "not be optimal. Gaussian distribution corresponds to MSE, and is sometimes used, but", "the reparameterisation trick shape : (batch_size, latent_dim). References ---------- [1] <NAME>, and <NAME>.", "sampling (False) kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Chen,", "record_loss_every: int, optional Every how many steps to recorsd the loss. rec_dist: {\"bernoulli\",", "= _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else: # minibatch stratified sampling logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist,", "logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma = self.gamma + 1 dw_kl_loss = (logqz_prodmarginals", "= _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else: # minibatch stratified sampling _, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist,", "n_chan, height, width = recon_data.size() is_colored = n_chan == 3 if distribution ==", "stratified sampling. Parameters ---------- latent_dist : torch.Tensor Mean and logvar of the normal", "per image reconstruction loss for a batch of data. Parameters ---------- data :", "between a normal distribution with diagonal covariance and a unit normal distribution. 
Parameters", "mean.pow(2) + logvar.exp()).mean(dim=0) total_kl = latent_kl.sum() if storer is not None: storer['kl_loss'].append(total_kl.item()) for", "1) # TODO replace this code with the following commented out code after", "elif name == \"VAE\": return BetaHLoss(beta=1, **kwargs_all) elif name == \"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"],", "= data.split(half_batch_size) data1 = data[0] data2 = data[1] # Factor VAE Loss recon_batch,", "prior_params = torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1) if", "not None: storer['kl_loss'].append(total_kl.item()) for i in range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return total_kl def", "disentanglement in variational autoencoders.\" Advances in Neural Information Processing Systems. 2018. \"\"\" batch_size", "logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else: # minibatch stratified sampling logqz, logqz_prodmarginals", "a batch of data. Parameters ---------- data : torch.Tensor Input data (e.g. batch", "vae loss vae_loss = rec_loss + anneal_rec * (kl_loss + self.gamma * tc_loss)", "the dimension-wise KL term. latent_dim: int Dimension of the latent variable is_mss :", "i in range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return loss def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\"", "per batch but not pixel and channel) \"\"\" batch_size, n_chan, height, width =", "**kwargs_all) elif name == \"VAE\": return BetaHLoss(beta=1, **kwargs_all) elif name == \"betaB\": return", "perm = torch.zeros_like(latent_sample) batch_size, dim_z = perm.size() for z in range(dim_z): pi =", "if self.is_mutual_info: # return vae loss vae_loss = rec_loss + anneal_rec * (kl_loss", "C_fin : float, optional Final annealed capacity C. 
C_n_interp : float, optional Number", "__init__(self, device, data_size, gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs):", "# rec loss, mutual information, total correlation and dim-wise kl rec_loss = _reconstruction_loss(data,", ": (batch_size, n_chan, height, width). distribution : {\"bernoulli\", \"gaussian\", \"laplace\"} Distribution of the", "which to store important variables for vizualisation. \"\"\" def _pre_call(self, is_train, storer): if", "as the TC term is included in `_kl_normal_loss` # beta = self.beta +", "= (logqz_condx - logqz).mean() tc_loss = (logqz - logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals -", "in the training set References : [1] Chen, <NAME>, et al. \"Isolating sources", "arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name == \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif", "2 batches batch_size = data.size(dim=0) half_batch_size = batch_size // 2 data = data.split(half_batch_size)", "raise ValueError(\"Unkown distribution: {}\".format(distribution)) loss = loss / batch_size if storer is not", "* 255, reduction=\"sum\") / 255 elif distribution == \"laplace\": # loss in [0,255]", "store important variables for vizualisation. \"\"\" def _pre_call(self, is_train, storer): if is_train: self.n_train_steps", "batches batch_size = data.size(dim=0) half_batch_size = batch_size // 2 data = data.split(half_batch_size) data1", "`BaseLoss`, e.g. rec_dist`. References ---------- [1] Chen, <NAME>, et al. \"Isolating sources of", "KL term. 
latent_dim: int Dimension of the latent variable is_mss : bool Selects", "\"\"\"Linear annealing of a parameter.\"\"\" if annealing_steps == 0: return fin assert fin", "* tc_loss) else: # return vae loss without mutual information term # change", "data[1] # Factor VAE Loss recon_batch, latent_dist, latent_sample1 = model(data1) rec_loss = _reconstruction_loss(data1,", "to be increased by one for correct comparaison # # as the TC", "as in [1] Parameters ---------- beta : float, optional Weight of the kl", "data into two batches. In the paper they sample 2 batches batch_size =", "the dataset alpha : float Weight of the mutual information term. beta :", "latent_sample[pi, z] return perm def linear_annealing(init, fin, step, annealing_steps): \"\"\"Linear annealing of a", "dw_kl_loss) if storer is not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove", "self._pre_call(model.training, storer) # factor-vae split data into two batches. In the paper they", "corresponds to L1. steps_anneal: nool, optional Number of annealing steps where gradually adding", "Weight of the dimension-wise KL term. latent_dim: int Dimension of the latent variable", "kl_loss = _kl_normal_loss(*latent_dist, storer) anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else", "self.steps_anneal = steps_anneal @abc.abstractmethod def __call__(self, data, recon_data, latent_dist, is_train, storer): \"\"\" Calculates", "self.discriminator(latent_sample1) # clamping to 0 because TC cannot be negative : TEST tc_loss", "same way (0.1,0.2) and (0.4,0.5), which might not be optimal. Gaussian distribution corresponds", "(batch_size, latent_dim). References ---------- [1] <NAME>, and <NAME>. \"Disentangling by factorising.\" arXiv preprint", "visual concepts with a constrained variational framework.\" (2016). 
kwargs: Additional arguments for `BaseLoss`,", "* (gamma * tc_loss + dw_kl_loss) # if self.is_mutual_info: # beta = self.beta", "the log (product of marginals of q(z_j)) with minibatch weighted sampling. Parameters ----------", "= F.l1_loss(recon_data, data, reduction=\"sum\") else: raise ValueError(\"Unkown distribution: {}\".format(distribution)) loss = loss /", "information term # change latent dist to torch.tensor (could probably avoid this) latent_dist", "using the reparameterisation trick shape : (batch_size, latent_dim). data_size : int Number of", "on the each pixel. Implicitely defines the reconstruction loss. Bernoulli corresponds to a", "d_z_perm))).mean() # Run discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step() if storer is not None:", "of shape : (batch_size, latent_dim). storer : dict Dictionary in which to store", "KL divergence between a normal distribution with diagonal covariance and a unit normal", "divide 255, is the same as not doing anything for L1 loss =", "containing all vae losses. \"\"\" import abc import math import torch from torch.nn", "# Factor VAE Loss recon_batch, latent_dist, latent_sample1 = model(data1) rec_loss = _reconstruction_loss(data1, recon_batch,", "_minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the log (product of marginals", "Algorithm 2 of [1] Parameters ---------- device : torch.device beta : float, optional", "loss function given the argparse arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name ==", "\"\"\" def _pre_call(self, is_train, storer): if is_train: self.n_train_steps += 1 if not is_train", "has to be increased by one for correct comparaison # # as the", "Beta-VAE loss as in [1] Parameters ---------- beta : float, optional Weight of", "float, optional Weight of the KL divergence term. 
kwargs: Additional arguments for `BaseLoss`,", "= data.size(0) if storer is not None: storer['loss'].append(loss.item()) return loss class FactorKLoss(BaseLoss): \"\"\"", "too big but # multiply by 255 and divide 255, is the same", "with minibatch weighted sampling. Parameters ---------- latent_dist : torch.Tensor Mean and logvar of", "__init__(self, device, data_size, alpha=1., beta=6., gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs) # beta values: dsprites:", "= _reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist) mi_loss = (logqz_condx - logqz).mean() tc_loss = (logqz", "C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs) self.gamma = gamma self.C_init = C_init self.C_fin = C_fin", "= Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self, data, model, optimizer, storer): storer", "anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) loss = rec_loss", "\"\"\" Estimates log q(z) and the log (product of marginals of q(z_j)) with", "batch_size = data.size(0) if storer is not None: storer['loss'].append(loss.item()) return loss class FactorKLoss(BaseLoss):", "data in the training set References : [1] Chen, <NAME>, et al. 
\"Isolating", "= (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) loss = rec_loss +", "= data.size(dim=0) half_batch_size = batch_size // 2 data = data.split(half_batch_size) data1 = data[0]", "is not None: storer['loss'].append(loss.item()) return loss class BetaBLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss", "calculate log p(z) prior_params = torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample, prior_params, batch_size,", "to a binary cross entropy (bse), Gaussian corresponds to MSE, Laplace corresponds to", "* 255, data * 255, reduction=\"sum\") / 255 elif distribution == \"laplace\": #", "et al. \"Isolating sources of disentanglement in variational autoencoders.\" Advances in Neural Information", "# minibatch stratified sampling _, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma = self.gamma", "None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove this when visualisation fixed tc_loss_vec", "= log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1, keepdim=False) - math.log(batch_size *", "self.n_train_steps, self.steps_anneal) if is_train else 1) loss = rec_loss + anneal_rec * (self.beta", "_minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) # rec loss, mutual information, total correlation and dim-wise kl", "q(z) matrix logqz_condx = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1) # calculate log p(z) prior_params", "data = data.split(half_batch_size) data1 = data[0] data2 = data[1] # Factor VAE Loss", "be too big but # multiply by 255 and divide 255, is the", "float Weight of the total correlation term. 
gamma : float Weight of the", "# kl_loss = _dimwise_kl_loss(*latent_dist, storer) # # vae_loss = rec_loss + kl_loss +", "not be too big but # multiply by 255 and divide 255, is", "latent_dist, batch_size, return_matrix=False).sum(dim=1) # calculate log p(z) prior_params = torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz", "storer=storer, distribution=self.rec_dist) # TODO: remove this kl_loss term once viz is sorted #", "Systems. 2018. \"\"\" def __init__(self, device, data_size, alpha=1., beta=6., gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs)", "commonly used. It has the issue that it doesn't penalize the same way", "partially the issue of MSE. storer : dict Dictionary in which to store", "in range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device) perm[:, z] = latent_sample[pi, z] return perm def", "unit normal distribution. Parameters ---------- mean : torch.Tensor Mean of the normal distribution.", "Calculates the KL divergence between a normal distribution with diagonal covariance and a", "= latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1, keepdim=False)", "split data into two batches. In the paper they sample 2 batches batch_size", "tc_loss_vec = (logqz - logqz_prodmarginals) for i in range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return", ": {\"bernoulli\", \"gaussian\", \"laplace\"} Distribution of the likelihood on the each pixel. 
Implicitely", "return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else: raise ValueError(\"Uknown loss", "= n_chan == 3 if distribution == \"bernoulli\": loss = F.binary_cross_entropy(recon_data, data, reduction=\"sum\")", "- init annealed = min(init + delta * step / annealing_steps, fin) return", "for z in range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device) perm[:, z] = latent_sample[pi, z] return", "= gamma self.C_init = C_init self.C_fin = C_fin self.C_n_interp = C_n_interp def __call__(self,", "annealed capacity C. C_fin : float, optional Final annealed capacity C. C_n_interp :", "self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self, data, model, optimizer, storer):", ": [1] Chen, <NAME>, et al. \"Isolating sources of disentanglement in variational autoencoders.\"", "\"gaussian\", \"laplace\"} Distribution of the likelihood on the each pixel. Implicitely defines the", "latent_dist, latent_sample1 = model(data1) rec_loss = _reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist) # TODO: remove", "{}\".format(name)) class BaseLoss(abc.ABC): \"\"\" Base class for losses. Parameters ---------- record_loss_every: int, optional", "(could probably avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log p(z)", "= - (0.5 * (F.logsigmoid(d_z) + F.logsigmoid(1 - d_z_perm))).mean() # Run discriminator optimizer", "few pixels that are very wrong. 
Laplace distribution corresponds to L1 solves partially", "correct comparaison # # as the TC term is included in `_kl_normal_loss` #", "in range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return loss def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates", "---------- data : torch.Tensor Input data (e.g. batch of images). Shape : (batch_size,", "argparse arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name == \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all)", "mutual information discriminator : disvae.discriminator.Discriminator optimizer_d : torch.optim kwargs: Additional arguments for `BaseLoss`,", "L1 loss = F.l1_loss(recon_data, data, reduction=\"sum\") else: raise ValueError(\"Unkown distribution: {}\".format(distribution)) loss =", "latent_dist[1]), dim=2) # calculate log p(z) prior_params = torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz =", "batch for each of the latent dimensions (mean and log_var). Parameters ---------- latent_sample:", "the issue that it doesn't penalize the same way (0.1,0.2) and (0.4,0.5), which", "used, but hard to train ecause it ends up focusing only a few", "elif name == \"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all)", "all vae losses. 
\"\"\" import abc import math import torch from torch.nn import", "logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the log", "mutual information term in the loss False : removes mutual information discriminator :", "recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal)", "log q(z) and the log (product of marginals of q(z_j)) with minibatch weighted", "C = (linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp) if is_train else self.C_fin) loss = rec_loss", "_kl_normal_loss(mean, logvar, storer=None): \"\"\" Calculates the KL divergence between a normal distribution with", "is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif name == \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"],", "def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps = 0 self.record_loss_every = record_loss_every self.rec_dist =", "data, reduction=\"sum\") else: raise ValueError(\"Unkown distribution: {}\".format(distribution)) loss = loss / batch_size if", "255 to not be too big but # multiply by 255 and divide", "losses. Parameters ---------- record_loss_every: int, optional Every how many steps to recorsd the", "rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps = 0 self.record_loss_every = record_loss_every self.rec_dist = rec_dist self.steps_anneal =", "loss for a batch of data. Parameters ---------- data : torch.Tensor Input data", "\"\"\" Calculates the per image reconstruction loss for a batch of data. 
Parameters", "and divide 255, is the same as not doing anything for L1 loss", "- math.log(batch_size * data_size) return logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates", "Kim, Hyunjik, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" def", "(0.4,0.5), which might not be optimal. Gaussian distribution corresponds to MSE, and is", "__call__(self, data, recon_batch, latent_dist, is_train, storer, latent_sample=None): storer = self._pre_call(is_train, storer) batch_size =", "0.5 * (-1 - logvar + mean.pow(2) + logvar.exp()).mean(dim=0) total_kl = latent_kl.sum() if", "\"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size,", "optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs) self.gamma = gamma self.data_size = data_size self.device =", "= torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\ - math.log(batch_size * data_size) return logqz, logqz_prodmarginals def", "* kl_loss) if storer is not None: storer['loss'].append(loss.item()) return loss class BetaBLoss(BaseLoss): \"\"\"", "self.gamma * (kl_loss - C).abs() batch_size = data.size(0) if storer is not None:", "# minibatch stratified sampling logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) # rec loss,", "* dw_kl_loss) if storer is not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO", "to not be too big but # multiply by 255 and divide 255,", "from q(z) (latent_dist) across the batch for each of the latent dimensions (mean", "by one for correct comparaison # # as the TC term is included", "= torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate 
log q(z|x) and _log q(z) matrix logqz_condx", "variance of the normal distribution. Shape (batch_size, latent_dim) storer : dict Dictionary in", "with diagonal covariance and a unit normal distribution. Parameters ---------- mean : torch.Tensor", "optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self, data, model, optimizer, storer): storer = self._pre_call(model.training, storer) #", "Factor VAE Loss recon_batch, latent_dist, latent_sample1 = model(data1) rec_loss = _reconstruction_loss(data1, recon_batch, storer=storer,", "Shape (batch_size, latent_dim, 2) latent_sample: torch.Tensor sample from the latent dimension using the", "minibatch stratified sampling. Parameters ---------- latent_dist : torch.Tensor Mean and logvar of the", "TC cannot be negative : TEST tc_loss = (F.logsigmoid(d_z) - F.logsigmoid(1 - d_z)).clamp(0).mean()", "from torch.nn import functional as F from torch import optim from .discriminator import", "term once viz is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist, storer) d_z =", "latent variable is_mss : bool Selects either minibatch stratified sampling (True) or minibatch", "= data_size self.beta = beta self.alpha = alpha self.gamma = gamma self.is_mss =", "Shape (batch_size, latent_dim) storer : dict Dictionary in which to store important variables", "logpz).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) # total", "the latent variable is_mss : bool Selects either minibatch stratified sampling (True) or", "storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not model.training: # don't backprop if evaluating return vae_loss #", "(batch_size, latent_dim). 
data_size : int Number of data in the training set References", "not self.is_mss: # minibatch weighted sampling logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else:", ": {}\".format(name)) class BaseLoss(abc.ABC): \"\"\" Base class for losses. Parameters ---------- record_loss_every: int,", "rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) anneal_rec = (linear_annealing(0,", "only a few pixels that are very wrong. Laplace distribution corresponds to L1", "distribution=self.rec_dist) mi_loss = (logqz_condx - logqz).mean() tc_loss = (logqz - logqz_prodmarginals).mean() dw_kl_loss =", "int Number of data in the training set References : [1] Chen, <NAME>,", "logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1, keepdim=False) - math.log(batch_size * data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2), dim=1,", "import log_density_normal, log_importance_weight_matrix # TO-DO: clean data_size and device def get_loss_f(name, kwargs_parse={}): \"\"\"Return", "= (logqz - logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals - logpz).mean() anneal_rec = (linear_annealing(0, 1,", "ends up focusing only a few pixels that are very wrong. Laplace distribution", "self.n_train_steps = 0 self.record_loss_every = record_loss_every self.rec_dist = rec_dist self.steps_anneal = steps_anneal @abc.abstractmethod", "commented out code after viz is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: # return", "= C_init self.C_fin = C_fin self.C_n_interp = C_n_interp def __call__(self, data, recon_data, latent_dist,", "alpha=1., beta=6., gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs) # beta values: dsprites: 6, celeba: 15", "shape : (batch_size, latent_dim). 
storer : dict Dictionary in which to store important", "self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step() if storer is not None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class BatchTCLoss(BaseLoss):", "each of shape : (batch_size, latent_dim). storer : dict Dictionary in which to", "of the latent variable is_mss : bool Selects either minibatch stratified sampling (True)", "if not is_train or self.n_train_steps % self.record_loss_every == 1: storer = storer else:", "vae_loss class BatchTCLoss(BaseLoss): \"\"\" Compute the decomposed KL loss with either minibatch weighted", "= _reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist) # TODO: remove this kl_loss term once viz", "beta self.alpha = alpha self.gamma = gamma self.is_mss = is_mss # minibatch stratified", "recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer,", "set References : [1] Chen, <NAME>, et al. \"Isolating sources of disentanglement in", "torch.Tensor sample from the latent dimension using the reparameterisation trick shape : (batch_size,", ": torch.Tensor Input data (e.g. batch of images). Shape : (batch_size, n_chan, height,", "# minibatch stratified sampling def __call__(self, data, recon_batch, latent_dist, is_train, storer, latent_sample=None): storer", "(2018). \"\"\" perm = torch.zeros_like(latent_sample) batch_size, dim_z = perm.size() for z in range(dim_z):", "of torch.tensor sufficient statistics of the latent dimension. E.g. for gaussian (mean, log_var)", "not model.training: # don't backprop if evaluating return vae_loss # Run VAE optimizer", "distribution. Parameters ---------- mean : torch.Tensor Mean of the normal distribution. 
Shape (batch_size,", "anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if model.training else 1) # TODO replace", "import Discriminator from disvae.utils.math import log_density_normal, log_importance_weight_matrix # TO-DO: clean data_size and device", "sampling logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else: # minibatch stratified sampling logqz,", "optional Number of annealing steps where gradually adding the regularisation. \"\"\" def __init__(self,", "+ _logqz, dim=1, keepdim=False).sum(1) return logqz, logqz_prodmarginals def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None): \"\"\"", "logqz_prodmarginals def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None): \"\"\" Calculates the per image reconstruction loss", "C_fin self.C_n_interp = C_n_interp def __call__(self, data, recon_data, latent_dist, is_train, storer): storer =", "this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log q(z|x) and _log q(z)", "torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\ - math.log(batch_size * data_size) return logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist,", "marginals of q(z_j)) with minibatch stratified sampling. Parameters ---------- latent_dist : torch.Tensor Mean", "_minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the log (product of marginals", ": dict Dictionary in which to store important variables for vizualisation. \"\"\" latent_dim", "d_tc_loss.backward() self.optimizer_d.step() if storer is not None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class BatchTCLoss(BaseLoss): \"\"\"", "F from torch import optim from .discriminator import Discriminator from disvae.utils.math import log_density_normal,", "in Neural Information Processing Systems. 2018. 
\"\"\" def __init__(self, device, data_size, alpha=1., beta=6.,", "a binary cross entropy (bse), Gaussian corresponds to MSE, Laplace corresponds to L1.", "+ self.gamma * tc_loss) else: # return vae loss without mutual information term", "1) # total loss loss = rec_loss + anneal_rec * (self.alpha * mi_loss", "self.gamma = gamma self.data_size = data_size self.device = device self.is_mutual_info = is_mutual_info self.is_mss", "dimension latent_kl = 0.5 * (-1 - logvar + mean.pow(2) + logvar.exp()).mean(dim=0) total_kl", "(product of marginals of q(z_j)) with minibatch stratified sampling. Parameters ---------- latent_dist :", "== \"gaussian\": # loss in [0,255] space but normalized by 255 to not", "self.dataset_size = data_size self.beta = beta self.alpha = alpha self.gamma = gamma self.is_mss", "= beta def __call__(self, data, recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train, storer)", "loss as per Algorithm 2 of [1] Parameters ---------- device : torch.device beta", "2018. \"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logqz_prodmarginals =", "evaluating return vae_loss # Run VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator Loss", "gamma : float Weight of the dimension-wise KL term. latent_dim: int Dimension of", "torch.Tensor Mean and logvar of the normal distribution. Shape (batch_size, latent_dim, 2) latent_sample:", "super().__init__(**kwargs) # beta values: dsprites: 6, celeba: 15 self.device = device self.dataset_size =", "{\"bernoulli\", \"gaussian\", \"laplace\"} Distribution of the likelihood on the each pixel. Implicitely defines", "is dimension of distribution. logvar : torch.Tensor Diagonal log variance of the normal", "data. Shape : (batch_size, n_chan, height, width). 
distribution : {\"bernoulli\", \"gaussian\", \"laplace\"} Distribution", "TODO: remove this kl_loss term once viz is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss =", "are very wrong. Laplace distribution corresponds to L1 solves partially the issue of", "_logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix", "def __init__(self, device, data_size, gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)),", "with either minibatch weighted sampling or minibatch stratified sampling according to [1] Parameters", ": float, optional Weight of the KL divergence term. kwargs: Additional arguments for", "normal distribution. Shape (batch_size, latent_dim, 2) latent_sample: torch.Tensor sample from the latent dimension", "rec_loss + anneal_rec * (kl_loss + self.gamma * tc_loss) else: # return vae", "recon_batch, latent_dist, is_train, storer, latent_sample=None): storer = self._pre_call(is_train, storer) batch_size = data.size(0) #", "+ anneal_rec * (gamma * tc_loss + dw_kl_loss) # if self.is_mutual_info: # beta", "1) loss = rec_loss + anneal_rec * (self.beta * kl_loss) if storer is", "import functional as F from torch import optim from .discriminator import Discriminator from", "= F.mse_loss(recon_data * 255, data * 255, reduction=\"sum\") / 255 elif distribution ==", "iterations for interpolating C. gamma : float, optional Weight of the KL divergence", "functional as F from torch import optim from .discriminator import Discriminator from disvae.utils.math", "Weight of the kl divergence. References: [1] Higgins, Irina, et al. \"beta-vae: Learning", "regularisation. 
\"\"\" def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps = 0 self.record_loss_every = record_loss_every", "d_tc_loss = - (0.5 * (F.logsigmoid(d_z) + F.logsigmoid(1 - d_z_perm))).mean() # Run discriminator", "- logqz_prodmarginals) for i in range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return loss def _minibatch_weighted_sampling(latent_dist,", "arXiv preprint arXiv:1804.03599 (2018). \"\"\" def __init__(self, C_init=0., C_fin=5., C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs)", "_reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) C = (linear_annealing(self.C_init, self.C_fin, self.n_train_steps,", "torch.device beta : float, optional Weight of the TC loss term. `gamma` in", "reconstruction loss. Bernoulli corresponds to a binary cross entropy (bse), Gaussian corresponds to", "trick shape : (batch_size, latent_dim). data_size : int Number of data in the", "\"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" def __init__(self, device, data_size, gamma=40.,", "= F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif distribution == \"gaussian\": # loss in [0,255] space", "logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) # rec loss, mutual information, total correlation", "tc_loss + dw_kl_loss) # if self.is_mutual_info: # beta = self.beta # kl_loss =", "the same way (0.1,0.2) and (0.4,0.5), which might not be optimal. Gaussian distribution", "(batch_size, latent_dim, 2) latent_sample: torch.Tensor sample from the latent dimension using the reparameterisation", "torch.Tensor Per image cross entropy (i.e. normalized per batch but not pixel and", "shape : (batch_size, latent_dim). References ---------- [1] <NAME>, and <NAME>. 
\"Disentangling by factorising.\"", "sampling according to [1] Parameters ---------- data_size: int Size of the dataset alpha", "optional Reconstruction distribution istribution of the likelihood on the each pixel. Implicitely defines", "following commented out code after viz is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: #", "import abc import math import torch from torch.nn import functional as F from", "super().__init__(**kwargs) self.gamma = gamma self.C_init = C_init self.C_fin = C_fin self.C_n_interp = C_n_interp", ": float Weight of the total correlation term. gamma : float Weight of", "too big loss = F.mse_loss(recon_data * 255, data * 255, reduction=\"sum\") / 255", "self.is_mss: # minibatch weighted sampling logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else: #", "% self.record_loss_every == 1: storer = storer else: storer = None return storer", "def _kl_normal_loss(mean, logvar, storer=None): \"\"\" Calculates the KL divergence between a normal distribution", "batch_size, n_chan, height, width = recon_data.size() is_colored = n_chan == 3 if distribution", "logqz = torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) +", "= data[0] data2 = data[1] # Factor VAE Loss recon_batch, latent_dist, latent_sample1 =", "and the log (product of marginals of q(z_j)) with minibatch stratified sampling. 
Parameters", "== \"bernoulli\": loss = F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif distribution == \"gaussian\": # loss", "storer is not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove this when", "Selects either minibatch stratified sampling (True) or minibatch weighted sampling (False) kwargs: Additional", "def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the log (product of", "mutual information term. beta : float Weight of the total correlation term. gamma", "return loss class FactorKLoss(BaseLoss): \"\"\" Compute the Factor-VAE loss as per Algorithm 2", "but hard to train ecause it ends up focusing only a few pixels", "same as not doing anything for L1 loss = F.l1_loss(recon_data, data, reduction=\"sum\") else:", "be increased by one for correct comparaison # # as the TC term", "**kwargs): super().__init__(**kwargs) self.gamma = gamma self.data_size = data_size self.device = device self.is_mutual_info =", "capacity C. C_n_interp : float, optional Number of training iterations for interpolating C.", ": torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height, width). distribution : {\"bernoulli\",", "Chen, <NAME>, et al. \"Isolating sources of disentanglement in variational autoencoders.\" Advances in", ": torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height, width). latent_dist : tuple", "with minibatch stratified sampling. Parameters ---------- latent_dist : torch.Tensor Mean and logvar of", "Implicitely defines the loss Bernoulli corresponds to a binary cross entropy (bse) loss", "else 1) # total loss loss = rec_loss + anneal_rec * (self.alpha *", "of marginals of q(z_j)) with minibatch weighted sampling. 
Parameters ---------- latent_dist : torch.Tensor", "values: dsprites: 6, celeba: 15 self.device = device self.dataset_size = data_size self.beta =", "is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist, storer) d_z = self.discriminator(latent_sample1) # clamping", "\\ - math.log(batch_size * data_size) return logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\"", ": torch.Tensor Mean and logvar of the normal distribution. Shape (batch_size, latent_dim, 2)", "storer = None return storer class BetaHLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as", "if storer is not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not model.training: # don't backprop", "Parameters ---------- device : torch.device beta : float, optional Weight of the TC", "loss. rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"}, optional Reconstruction distribution istribution of the likelihood on", "= (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if model.training else 1) # TODO replace this", "- d_z_perm))).mean() # Run discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step() if storer is not", "of the dataset alpha : float Weight of the mutual information term. beta", "* tc_loss + dw_kl_loss) # if self.is_mutual_info: # beta = self.beta # kl_loss", "alpha self.gamma = gamma self.is_mss = is_mss # minibatch stratified sampling def __call__(self,", "solves partially the issue of MSE. 
storer : dict Dictionary in which to", "self.C_fin, self.n_train_steps, self.C_n_interp) if is_train else self.C_fin) loss = rec_loss + self.gamma *", "paper they sample 2 batches batch_size = data.size(dim=0) half_batch_size = batch_size // 2", "gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name == \"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not", "Calculates loss for a batch of data. Parameters ---------- data : torch.Tensor Input", "kl_loss = _kl_normal_loss(*latent_dist, storer) C = (linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp) if is_train else", "variable is_mss : bool Selects either minibatch stratified sampling (True) or minibatch weighted", "the latent dimension using the reparameterisation trick shape : (batch_size, latent_dim). References ----------", "Get second sample of latent distribution latent_sample2 = model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach() d_z_perm", "\"\"\" batch_size, n_chan, height, width = recon_data.size() is_colored = n_chan == 3 if", "latent_dim). References ---------- [1] <NAME>, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983", "= optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self, data, model, optimizer, storer): storer = self._pre_call(model.training, storer)", "and a unit normal distribution. Parameters ---------- mean : torch.Tensor Mean of the", "for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Chen, <NAME>, et al. \"Isolating sources", "recon_data, latent_dist, is_train, storer): \"\"\" Calculates loss for a batch of data. Parameters", "import torch from torch.nn import functional as F from torch import optim from", "float, optional Final annealed capacity C. C_n_interp : float, optional Number of training", "of the likelihood on the each pixel. 
Implicitely defines the reconstruction loss. Bernoulli", "def __call__(self, data, model, optimizer, storer): storer = self._pre_call(model.training, storer) # factor-vae split", "latent_sample1, self.data_size) gamma = self.gamma + 1 dw_kl_loss = (logqz_prodmarginals - logpz).mean() vae_loss", ": bool True : includes the mutual information term in the loss False", "_logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) + _logqz, dim=1, keepdim=False).sum(1) return", "2 data = data.split(half_batch_size) data1 = data[0] data2 = data[1] # Factor VAE", "loss in [0,255] space but normalized by 255 to not be too big", "for `BaseLoss`, e.g. rec_dist`. \"\"\" def __init__(self, beta=4, **kwargs): super().__init__(**kwargs) self.beta = beta", "not pixel and channel) \"\"\" batch_size, n_chan, height, width = recon_data.size() is_colored =", "assert fin > init delta = fin - init annealed = min(init +", "logqz_condx = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1) # calculate log p(z) prior_params = torch.zeros(batch_size,", ": TEST tc_loss = (F.logsigmoid(d_z) - F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0, 1,", "beta = self.beta # kl_loss = _kl_normal_loss(*latent_dist, storer) # else: # # beta", "BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name == \"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"],", "the batch for each of the latent dimensions (mean and log_var). Parameters ----------", "of Algorithm 1 in ref [1]. Randomly permutes the sample from q(z) (latent_dist)", "reconstruction loss for a batch of data. Parameters ---------- data : torch.Tensor Input", "(batch_size, n_chan, height, width). 
distribution : {\"bernoulli\", \"gaussian\", \"laplace\"} Distribution of the likelihood", "ref [1]. Randomly permutes the sample from q(z) (latent_dist) across the batch for", "-1).sum(1) if not self.is_mss: # minibatch weighted sampling logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample,", "parameter.\"\"\" if annealing_steps == 0: return fin assert fin > init delta =", "is_train, storer): if is_train: self.n_train_steps += 1 if not is_train or self.n_train_steps %", "not None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class BatchTCLoss(BaseLoss): \"\"\" Compute the decomposed KL loss", "each pixel. Implicitely defines the loss Bernoulli corresponds to a binary cross entropy", "stratified sampling according to [1] Parameters ---------- data_size: int Size of the dataset", "= log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size,", "1, self.n_train_steps, self.steps_anneal) if is_train else 1) loss = rec_loss + anneal_rec *", "return loss class BetaBLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1] Parameters", "removes mutual information discriminator : disvae.discriminator.Discriminator optimizer_d : torch.optim kwargs: Additional arguments for", "* mi_loss + self.beta * tc_loss + self.gamma * dw_kl_loss) if storer is", "dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name == \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name == \"VAE\":", "- d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if model.training else 1) #", "BatchTCLoss(BaseLoss): \"\"\" Compute the decomposed KL loss with either minibatch weighted sampling or", "_kl_normal_loss(*latent_dist, storer) # else: # # beta has to be increased by one", 
"---------- beta : float, optional Weight of the kl divergence. References: [1] Higgins,", "clean data_size and device def get_loss_f(name, kwargs_parse={}): \"\"\"Return the correct loss function given", "stratified sampling logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) # rec loss, mutual information,", "References ---------- [1] Kim, Hyunjik, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983", "storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove this when visualisation fixed tc_loss_vec = (logqz -", "# minibatch weighted sampling _, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else: # minibatch", "_kl_normal_loss(*latent_dist, storer) C = (linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp) if is_train else self.C_fin) loss", "height, width). distribution : {\"bernoulli\", \"gaussian\", \"laplace\"} Distribution of the likelihood on the", "model.training else 1) # TODO replace this code with the following commented out", "\"laplace\": # loss in [0,255] space but normalized by 255 to not be", "Burgess, <NAME>., et al. \"Understanding disentangling in $\\beta$-VAE.\" arXiv preprint arXiv:1804.03599 (2018). \"\"\"", "the paper they sample 2 batches batch_size = data.size(dim=0) half_batch_size = batch_size //", "else: storer = None return storer class BetaHLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss", "z_perm = _permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm) # Calculate total correlation loss d_tc_loss =", "is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: # return vae loss vae_loss = rec_loss", "6, celeba: 15 self.device = device self.dataset_size = data_size self.beta = beta self.alpha", "for losses. 
Parameters ---------- record_loss_every: int, optional Every how many steps to recorsd", "code after viz is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: # return vae loss", "[1] Parameters ---------- data_size: int Size of the dataset alpha : float Weight", "# TODO Remove this when visualisation fixed tc_loss_vec = (logqz - logqz_prodmarginals) for", "= mean.size(1) # batch mean of kl for each latent dimension latent_kl =", "mi_loss + self.beta * tc_loss + self.gamma * dw_kl_loss) if storer is not", "1) + _logqz, dim=1, keepdim=False).sum(1) return logqz, logqz_prodmarginals def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None):", "kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Chen, <NAME>, et", "distribution == \"laplace\": # loss in [0,255] space but normalized by 255 to", "self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) anneal_rec", "disvae.discriminator.Discriminator optimizer_d : torch.optim kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ----------", "viz is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: # return vae loss vae_loss =", "bool True : includes the mutual information term in the loss False :", "latent_dim = mean.size(1) # batch mean of kl for each latent dimension latent_kl", "data_size): \"\"\" Estimates log q(z) and the log (product of marginals of q(z_j))", "interpolating C. gamma : float, optional Weight of the KL divergence term. 
kwargs:", "dim=2) # calculate log p(z) prior_params = torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample1,", "storer, latent_sample=None): storer = self._pre_call(is_train, storer) batch_size = data.size(0) # change latent dist", "255, is the same as not doing anything for L1 loss = F.l1_loss(recon_data,", "distribution. logvar : torch.Tensor Diagonal log variance of the normal distribution. Shape (batch_size,", "distribution with diagonal covariance and a unit normal distribution. Parameters ---------- mean :", "if storer is not None: storer['recon_loss'].append(loss.item()) return loss def _kl_normal_loss(mean, logvar, storer=None): \"\"\"", "storer is not None: storer['kl_loss'].append(total_kl.item()) for i in range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return", "= is_mutual_info self.is_mss = is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs) def", "train ecause it ends up focusing only a few pixels that are very", "Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Kim, Hyunjik, and <NAME>.", "p(z) prior_params = torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1)", "term is included in `_kl_normal_loss` # beta = self.beta + 1 # kl_loss", "== 3 if distribution == \"bernoulli\": loss = F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif distribution", "if self.is_mutual_info: # beta = self.beta # kl_loss = _kl_normal_loss(*latent_dist, storer) # else:", "rec_dist`. 
\"\"\" def __init__(self, beta=4, **kwargs): super().__init__(**kwargs) self.beta = beta def __call__(self, data,", "not None: storer['loss'].append(loss.item()) return loss class FactorKLoss(BaseLoss): \"\"\" Compute the Factor-VAE loss as", "= (logqz_prodmarginals - logpz).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else", "sampling _, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma = self.gamma + 1 dw_kl_loss", "= rec_loss + anneal_rec * (self.beta * kl_loss) if storer is not None:", "torch from torch.nn import functional as F from torch import optim from .discriminator", "optional Weight of the TC loss term. `gamma` in the paper. is_mutual_info :", "= latent_sample[pi, z] return perm def linear_annealing(init, fin, step, annealing_steps): \"\"\"Linear annealing of", "(could probably avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log q(z|x)", "storer=None): \"\"\" Calculates the per image reconstruction loss for a batch of data.", "+= 1 if not is_train or self.n_train_steps % self.record_loss_every == 1: storer =", "2 of [1] Parameters ---------- device : torch.device beta : float, optional Weight", "\"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else: raise ValueError(\"Uknown", "+ anneal_rec * (self.alpha * mi_loss + self.beta * tc_loss + self.gamma *", "but normalized by 255 to not be too big but # multiply by", "storer) anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) loss =", "recon_batch, latent_dist, latent_sample1 = model(data1) rec_loss = _reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist) # TODO:", "= self.discriminator(z_perm) # 
Calculate total correlation loss d_tc_loss = - (0.5 * (F.logsigmoid(d_z)", "distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train", "in variational autoencoders.\" Advances in Neural Information Processing Systems. 2018. \"\"\" batch_size =", "in which to store important variables for vizualisation. \"\"\" latent_dim = mean.size(1) #", "latent_sample, self.dataset_size) else: # minibatch stratified sampling logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size)", "annealing steps where gradually adding the regularisation. \"\"\" def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0):", "important variables for vizualisation. \"\"\" def _pre_call(self, is_train, storer): if is_train: self.n_train_steps +=", "total correlation loss d_tc_loss = - (0.5 * (F.logsigmoid(d_z) + F.logsigmoid(1 - d_z_perm))).mean()", "self.gamma * dw_kl_loss) if storer is not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) #", "= loss / batch_size if storer is not None: storer['recon_loss'].append(loss.item()) return loss def", "to train ecause it ends up focusing only a few pixels that are", "# TO-DO: clean data_size and device def get_loss_f(name, kwargs_parse={}): \"\"\"Return the correct loss", "if is_train: self.n_train_steps += 1 if not is_train or self.n_train_steps % self.record_loss_every ==", "Parameters ---------- beta : float, optional Weight of the kl divergence. References: [1]", "to MSE, Laplace corresponds to L1. 
steps_anneal: nool, optional Number of annealing steps", "recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) C = (linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp)", "self.steps_anneal) if is_train else 1) # total loss loss = rec_loss + anneal_rec", ": float Weight of the mutual information term. beta : float Weight of", ": float, optional Weight of the TC loss term. `gamma` in the paper.", "to [1] Parameters ---------- data_size: int Size of the dataset alpha : float", "used. It has the issue that it doesn't penalize the same way (0.1,0.2)", "= _kl_normal_loss(*latent_dist, storer) anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1)", "gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif name == \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"],", "= self.gamma + 1 dw_kl_loss = (logqz_prodmarginals - logpz).mean() vae_loss = rec_loss +", "TC term is included in `_kl_normal_loss` # beta = self.beta + 1 #", "kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. 
References ---------- [1] Burgess, <NAME>., et", "\"VAE\": return BetaHLoss(beta=1, **kwargs_all) elif name == \"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"],", "self.is_mss = is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self, data,", "= batch_size // 2 data = data.split(half_batch_size) data1 = data[0] data2 = data[1]", "in range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return total_kl def _permute_dims(latent_sample): \"\"\" Implementation of Algorithm", "for gaussian (mean, log_var) each of shape : (batch_size, latent_dim). storer : dict", "important variables for vizualisation. \"\"\" latent_dim = mean.size(1) # batch mean of kl", "float, optional Weight of the kl divergence. References: [1] Higgins, Irina, et al.", "BetaBLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1] Parameters ---------- C_init :", "per Algorithm 2 of [1] Parameters ---------- device : torch.device beta : float,", "raise ValueError(\"Uknown loss : {}\".format(name)) class BaseLoss(abc.ABC): \"\"\" Base class for losses. Parameters", "storer is not None: storer['loss'].append(loss.item()) return loss class FactorKLoss(BaseLoss): \"\"\" Compute the Factor-VAE", "n_chan, height, width). distribution : {\"bernoulli\", \"gaussian\", \"laplace\"} Distribution of the likelihood on", "avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log q(z|x) and _log", "of q(z_j)) with minibatch weighted sampling. Parameters ---------- latent_dist : torch.Tensor Mean and", "Weight of the KL divergence term. kwargs: Additional arguments for `BaseLoss`, e.g. 
rec_dist`.", "= log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling", "et al. \"Understanding disentangling in $\\beta$-VAE.\" arXiv preprint arXiv:1804.03599 (2018). \"\"\" def __init__(self,", "F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if model.training else 1)", "References ---------- [1] <NAME>, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018).", "+ str(i)].append(tc_loss_vec[i].item()) return loss def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and", "the normal distribution. Shape (batch_size, latent_dim) storer : dict Dictionary in which to", "[0,255] space but normalized by 255 to not be too big loss =", "fin > init delta = fin - init annealed = min(init + delta", "FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif name == \"batchTC\": return", "keepdim=False) \\ - math.log(batch_size * data_size) return logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size):", "the kl divergence. References: [1] Higgins, Irina, et al. \"beta-vae: Learning basic visual", "to 0 because TC cannot be negative : TEST tc_loss = (F.logsigmoid(d_z) -", "weighted sampling _, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else: # minibatch stratified sampling", "1 dw_kl_loss = (logqz_prodmarginals - logpz).mean() vae_loss = rec_loss + anneal_rec * (gamma", "Every how many steps to recorsd the loss. rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"}, optional", "of annealing steps where gradually adding the regularisation. 
\"\"\" def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\",", "recorsd the loss. rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"}, optional Reconstruction distribution istribution of the", "storer['kl_loss'].append(total_kl.item()) for i in range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return total_kl def _permute_dims(latent_sample): \"\"\"", "[1] Burgess, <NAME>., et al. \"Understanding disentangling in $\\beta$-VAE.\" arXiv preprint arXiv:1804.03599 (2018).", "as F from torch import optim from .discriminator import Discriminator from disvae.utils.math import", "of images). Shape : (batch_size, n_chan, height, width). recon_data : torch.Tensor Reconstructed data.", "the KL divergence term. kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ----------", "= steps_anneal @abc.abstractmethod def __call__(self, data, recon_data, latent_dist, is_train, storer): \"\"\" Calculates loss", "self.optimizer_d.step() if storer is not None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class BatchTCLoss(BaseLoss): \"\"\" Compute", "\"Isolating sources of disentanglement in variational autoencoders.\" Advances in Neural Information Processing Systems.", "defines the loss Bernoulli corresponds to a binary cross entropy (bse) loss and", "z] return perm def linear_annealing(init, fin, step, annealing_steps): \"\"\"Linear annealing of a parameter.\"\"\"", "total correlation term. gamma : float Weight of the dimension-wise KL term. latent_dim:", "1, self.n_train_steps, self.steps_anneal) if is_train else 1) # total loss loss = rec_loss", "not self.is_mss: # minibatch weighted sampling _, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else:", "vizualisation. Returns ------- loss : torch.Tensor Per image cross entropy (i.e. 
normalized per", "term in the loss False : removes mutual information discriminator : disvae.discriminator.Discriminator optimizer_d", "Module containing all vae losses. \"\"\" import abc import math import torch from", "\"\"\" Compute the Beta-VAE loss as in [1] Parameters ---------- C_init : float,", "sampling _, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else: # minibatch stratified sampling _,", "---------- mean : torch.Tensor Mean of the normal distribution. Shape (batch_size, latent_dim) where", "in [0,255] space but normalized by 255 to not be too big loss", "beta * tc_loss if storer is not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not model.training:", "= self.discriminator(latent_sample1) # clamping to 0 because TC cannot be negative : TEST", "the issue of MSE. storer : dict Dictionary in which to store important", "multiply by 255 and divide 255, is the same as not doing anything", "arXiv:1802.05983 (2018). \"\"\" perm = torch.zeros_like(latent_sample) batch_size, dim_z = perm.size() for z in", "C_fin=5., C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs) self.gamma = gamma self.C_init = C_init self.C_fin =", "decomposed KL loss with either minibatch weighted sampling or minibatch stratified sampling according", "= (linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp) if is_train else self.C_fin) loss = rec_loss +", "1, self.n_train_steps, self.steps_anneal) if model.training else 1) # TODO replace this code with", "loss vae_loss = rec_loss + anneal_rec * (kl_loss + self.gamma * tc_loss) else:", "shape : (batch_size, latent_dim). data_size : int Number of data in the training", "the likelihood on the each pixel. Implicitely defines the reconstruction loss. Bernoulli corresponds", "term. beta : float Weight of the total correlation term. 
gamma : float", "is not None: storer['loss'].append(loss.item()) return loss class FactorKLoss(BaseLoss): \"\"\" Compute the Factor-VAE loss", "C_n_interp : float, optional Number of training iterations for interpolating C. gamma :", "_minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else: # minibatch stratified sampling logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample,", "discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step() if storer is not None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss", "= rec_loss + self.gamma * (kl_loss - C).abs() batch_size = data.size(0) if storer", "# return vae loss vae_loss = rec_loss + anneal_rec * (kl_loss + self.gamma", "- C).abs() batch_size = data.size(0) if storer is not None: storer['loss'].append(loss.item()) return loss", "data_size, gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs) self.gamma", "normal distribution. Shape (batch_size, latent_dim) where D is dimension of distribution. logvar :", "normal distribution. 
Shape (batch_size, latent_dim) storer : dict Dictionary in which to store", "keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) + _logqz, dim=1, keepdim=False).sum(1) return logqz, logqz_prodmarginals", "= model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm) # Calculate total correlation loss", "Compute the Factor-VAE loss as per Algorithm 2 of [1] Parameters ---------- device", "d_z = self.discriminator(latent_sample1) # clamping to 0 because TC cannot be negative :", "self.steps_anneal) if model.training else 1) # TODO replace this code with the following", "normalized by 255 to not be too big but # multiply by 255", "autoencoders.\" Advances in Neural Information Processing Systems. 2018. \"\"\" batch_size = latent_dist.size(0) _logqz", "gaussian (mean, log_var) each of shape : (batch_size, latent_dim). storer : dict Dictionary", "clamping to 0 because TC cannot be negative : TEST tc_loss = (F.logsigmoid(d_z)", "prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling _, logqz_prodmarginals", "\"\"\" Implementation of Algorithm 1 in ref [1]. Randomly permutes the sample from", "a constrained variational framework.\" (2016). kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. \"\"\"", "sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist, storer) d_z = self.discriminator(latent_sample1) # clamping to", "255 and divide 255, is the same as not doing anything for L1", "kl_loss = _kl_normal_loss(*latent_dist, storer) d_z = self.discriminator(latent_sample1) # clamping to 0 because TC", "Laplace distribution corresponds to L1 solves partially the issue of MSE. 
storer :", "**kwargs_all) elif name == \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"],", "**kwargs): super().__init__(**kwargs) self.gamma = gamma self.C_init = C_init self.C_fin = C_fin self.C_n_interp =", "(F.logsigmoid(d_z) + F.logsigmoid(1 - d_z_perm))).mean() # Run discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step() if", "minibatch stratified sampling (True) or minibatch weighted sampling (False) kwargs: Additional arguments for", "each latent dimension latent_kl = 0.5 * (-1 - logvar + mean.pow(2) +", "= _permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm) # Calculate total correlation loss d_tc_loss = -", "# total loss loss = rec_loss + anneal_rec * (self.alpha * mi_loss +", ": disvae.discriminator.Discriminator optimizer_d : torch.optim kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References", "latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs) self.gamma = gamma self.data_size = data_size", "one for correct comparaison # # as the TC term is included in", "for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Kim, Hyunjik, and <NAME>. \"Disentangling by", "arXiv preprint arXiv:1802.05983 (2018). \"\"\" perm = torch.zeros_like(latent_sample) batch_size, dim_z = perm.size() for", "_permute_dims(latent_sample): \"\"\" Implementation of Algorithm 1 in ref [1]. Randomly permutes the sample", "latent_dist, is_train, storer): \"\"\" Calculates loss for a batch of data. 
Parameters ----------", "loss def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the log (product", "= _kl_normal_loss(*latent_dist, storer) # else: # # beta has to be increased by", "kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif name == \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"],", ": removes mutual information discriminator : disvae.discriminator.Discriminator optimizer_d : torch.optim kwargs: Additional arguments", "log_importance_weight_matrix # TO-DO: clean data_size and device def get_loss_f(name, kwargs_parse={}): \"\"\"Return the correct", "(linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) # total loss loss =", "(batch_size, latent_dim) storer : dict Dictionary in which to store important variables for", "discriminator : disvae.discriminator.Discriminator optimizer_d : torch.optim kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`.", "latent dist to torch.tensor (could probably avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2)", "preprint arXiv:1802.05983 (2018). \"\"\" def __init__(self, device, data_size, gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10,", "rec_loss = _reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist) mi_loss = (logqz_condx - logqz).mean() tc_loss =", "\"\"\"Return the correct loss function given the argparse arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"])", ": float, optional Starting annealed capacity C. 
C_fin : float, optional Final annealed", "(logqz - logqz_prodmarginals) for i in range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return loss def", "stratified sampling def __call__(self, data, recon_batch, latent_dist, is_train, storer, latent_sample=None): storer = self._pre_call(is_train,", "storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if", "import optim from .discriminator import Discriminator from disvae.utils.math import log_density_normal, log_importance_weight_matrix # TO-DO:", "in [1] Parameters ---------- C_init : float, optional Starting annealed capacity C. C_fin", "return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name == \"VAE\": return BetaHLoss(beta=1, **kwargs_all) elif name ==", "# batch mean of kl for each latent dimension latent_kl = 0.5 *", "if storer is not None: storer['kl_loss'].append(total_kl.item()) for i in range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item())", "statistics of the latent dimension. E.g. for gaussian (mean, log_var) each of shape", "Calculate total correlation loss d_tc_loss = - (0.5 * (F.logsigmoid(d_z) + F.logsigmoid(1 -", "C_init : float, optional Starting annealed capacity C. C_fin : float, optional Final", "C).abs() batch_size = data.size(0) if storer is not None: storer['loss'].append(loss.item()) return loss class", "[1] Higgins, Irina, et al. \"beta-vae: Learning basic visual concepts with a constrained", "latent_sample=None): storer = self._pre_call(is_train, storer) batch_size = data.size(0) # change latent dist to", "# multiply by 255 and divide 255, is the same as not doing", "return fin assert fin > init delta = fin - init annealed =", "e.g. rec_dist`. References ---------- [1] Burgess, <NAME>., et al. \"Understanding disentangling in $\\beta$-VAE.\"", "<NAME>., et al. 
\"Understanding disentangling in $\\beta$-VAE.\" arXiv preprint arXiv:1804.03599 (2018). \"\"\" def", "dim=1, keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) + _logqz, dim=1, keepdim=False).sum(1) return logqz,", "i in range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return total_kl def _permute_dims(latent_sample): \"\"\" Implementation of", "storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) C =", "TEST tc_loss = (F.logsigmoid(d_z) - F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps,", "= data.size(0) # change latent dist to torch.tensor (could probably avoid this) latent_dist", "0.9)), **kwargs): super().__init__(**kwargs) self.gamma = gamma self.data_size = data_size self.device = device self.is_mutual_info", "= is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self, data, model,", "kl_loss = _kl_normal_loss(*latent_dist, storer) # else: # # beta has to be increased", "beta values: dsprites: 6, celeba: 15 self.device = device self.dataset_size = data_size self.beta", "= storer else: storer = None return storer class BetaHLoss(BaseLoss): \"\"\" Compute the", "self.is_mss = is_mss # minibatch stratified sampling def __call__(self, data, recon_batch, latent_dist, is_train,", "where D is dimension of distribution. 
logvar : torch.Tensor Diagonal log variance of", "= torch.randperm(batch_size).to(latent_sample.device) perm[:, z] = latent_sample[pi, z] return perm def linear_annealing(init, fin, step,", "**optim_kwargs) def __call__(self, data, model, optimizer, storer): storer = self._pre_call(model.training, storer) # factor-vae", "_dimwise_kl_loss(*latent_dist, storer) # # vae_loss = rec_loss + kl_loss + beta * tc_loss", "z] = latent_sample[pi, z] return perm def linear_annealing(init, fin, step, annealing_steps): \"\"\"Linear annealing", "dim=1, keepdim=False) - math.log(batch_size * data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\ -", "fixed tc_loss_vec = (logqz - logqz_prodmarginals) for i in range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item())", "up focusing only a few pixels that are very wrong. Laplace distribution corresponds", "log_density_normal, log_importance_weight_matrix # TO-DO: clean data_size and device def get_loss_f(name, kwargs_parse={}): \"\"\"Return the", "is_train: self.n_train_steps += 1 if not is_train or self.n_train_steps % self.record_loss_every == 1:", "latent dimension using the reparameterisation trick shape : (batch_size, latent_dim). data_size : int", "and logvar of the normal distribution. Shape (batch_size, latent_dim, 2) latent_sample: torch.Tensor sample", "3 if distribution == \"bernoulli\": loss = F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif distribution ==", "for i in range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return total_kl def _permute_dims(latent_sample): \"\"\" Implementation", "[1] Parameters ---------- device : torch.device beta : float, optional Weight of the", "\"\"\" Compute the Factor-VAE loss as per Algorithm 2 of [1] Parameters ----------", "of the dimension-wise KL term. 
latent_dim: int Dimension of the latent variable is_mss", "(torch.logsumexp(_logqz, dim=1, keepdim=False) - math.log(batch_size * data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\", "init annealed = min(init + delta * step / annealing_steps, fin) return annealed", "is_mutual_info self.is_mss = is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self,", "_kl_normal_loss(*latent_dist, storer) d_z = self.discriminator(latent_sample1) # clamping to 0 because TC cannot be", "return total_kl def _permute_dims(latent_sample): \"\"\" Implementation of Algorithm 1 in ref [1]. Randomly", "+ anneal_rec * (kl_loss + self.gamma * tc_loss) else: # return vae loss", "self.discriminator(z_perm) # Calculate total correlation loss d_tc_loss = - (0.5 * (F.logsigmoid(d_z) +", "dw_kl_loss = (logqz_prodmarginals - logpz).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train", "= perm.size() for z in range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device) perm[:, z] = latent_sample[pi,", "on the each pixel. Implicitely defines the loss Bernoulli corresponds to a binary", "distribution corresponds to L1 solves partially the issue of MSE. storer : dict", "kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif name == \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"],", "Parameters ---------- data_size: int Size of the dataset alpha : float Weight of", "\"Understanding disentangling in $\\beta$-VAE.\" arXiv preprint arXiv:1804.03599 (2018). 
\"\"\" def __init__(self, C_init=0., C_fin=5.,", "# https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist, storer) d_z = self.discriminator(latent_sample1) # clamping to 0", "optional Starting annealed capacity C. C_fin : float, optional Final annealed capacity C.", "# as the TC term is included in `_kl_normal_loss` # beta = self.beta", "concepts with a constrained variational framework.\" (2016). kwargs: Additional arguments for `BaseLoss`, e.g.", "self._pre_call(is_train, storer) batch_size = data.size(0) # change latent dist to torch.tensor (could probably", "keepdim=False) - math.log(batch_size * data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\ - math.log(batch_size", "# # as the TC term is included in `_kl_normal_loss` # beta =", "permutes the sample from q(z) (latent_dist) across the batch for each of the", "batch_size if storer is not None: storer['recon_loss'].append(loss.item()) return loss def _kl_normal_loss(mean, logvar, storer=None):", "variables for vizualisation. \"\"\" latent_dim = mean.size(1) # batch mean of kl for", "width). recon_data : torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height, width). distribution", ": (batch_size, latent_dim). storer : dict Dictionary in which to store important variables", "data_size: int Size of the dataset alpha : float Weight of the mutual", "BetaHLoss(beta=1, **kwargs_all) elif name == \"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif", "data.size(dim=0) half_batch_size = batch_size // 2 data = data.split(half_batch_size) data1 = data[0] data2", "(self.alpha * mi_loss + self.beta * tc_loss + self.gamma * dw_kl_loss) if storer", "(2018). 
\"\"\" def __init__(self, C_init=0., C_fin=5., C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs) self.gamma = gamma", "else 1) # TODO replace this code with the following commented out code", "the decomposed KL loss with either minibatch weighted sampling or minibatch stratified sampling", "loss = F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif distribution == \"gaussian\": # loss in [0,255]", "None: storer['loss'].append(loss.item()) return loss class FactorKLoss(BaseLoss): \"\"\" Compute the Factor-VAE loss as per", "+ dw_kl_loss) # if self.is_mutual_info: # beta = self.beta # kl_loss = _kl_normal_loss(*latent_dist,", "[1] Parameters ---------- beta : float, optional Weight of the kl divergence. References:", "the loss False : removes mutual information discriminator : disvae.discriminator.Discriminator optimizer_d : torch.optim", "(bse) loss and is the most commonly used. It has the issue that", "Loss # Get second sample of latent distribution latent_sample2 = model.sample_latent(data2) z_perm =", "\"\"\" def __init__(self, device, data_size, alpha=1., beta=6., gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs) # beta", "comparaison # # as the TC term is included in `_kl_normal_loss` # beta", "torch.zeros_like(latent_sample) batch_size, dim_z = perm.size() for z in range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device) perm[:,", "as in [1] Parameters ---------- C_init : float, optional Starting annealed capacity C.", "= _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma = self.gamma + 1 dw_kl_loss = (logqz_prodmarginals -", "self.record_loss_every == 1: storer = storer else: storer = None return storer class", "* (self.alpha * mi_loss + self.beta * tc_loss + self.gamma * dw_kl_loss) if", "tuple of torch.tensor sufficient statistics of the latent dimension. E.g. for gaussian (mean,", "cross entropy (bse) loss and is the most commonly used. 
It has the", "(batch_size, latent_dim) where D is dimension of distribution. logvar : torch.Tensor Diagonal log", "== \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name == \"VAE\": return BetaHLoss(beta=1, **kwargs_all) elif", "self.record_loss_every = record_loss_every self.rec_dist = rec_dist self.steps_anneal = steps_anneal @abc.abstractmethod def __call__(self, data,", "minibatch weighted sampling. Parameters ---------- latent_dist : torch.Tensor Mean and logvar of the", "= self._pre_call(is_train, storer) batch_size = data.size(0) # change latent dist to torch.tensor (could", "information term in the loss False : removes mutual information discriminator : disvae.discriminator.Discriminator", "autoencoders.\" Advances in Neural Information Processing Systems. 2018. \"\"\" def __init__(self, device, data_size,", "factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" def __init__(self, device, data_size, gamma=40., is_mutual_info=True, is_mss=False,", "width). latent_dist : tuple of torch.tensor sufficient statistics of the latent dimension. E.g.", "None: storer['kl_loss'].append(total_kl.item()) for i in range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return total_kl def _permute_dims(latent_sample):", "Bernoulli corresponds to a binary cross entropy (bse) loss and is the most", "big loss = F.mse_loss(recon_data * 255, data * 255, reduction=\"sum\") / 255 elif", "ValueError(\"Uknown loss : {}\".format(name)) class BaseLoss(abc.ABC): \"\"\" Base class for losses. Parameters ----------", "Base class for losses. Parameters ---------- record_loss_every: int, optional Every how many steps", "self.data_size) else: # minibatch stratified sampling _, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma", "optional Weight of the KL divergence term. 
kwargs: Additional arguments for `BaseLoss`, e.g.", "is_mss=False, **kwargs): super().__init__(**kwargs) # beta values: dsprites: 6, celeba: 15 self.device = device", "= gamma self.data_size = data_size self.device = device self.is_mutual_info = is_mutual_info self.is_mss =", "return loss def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the log", "channel) \"\"\" batch_size, n_chan, height, width = recon_data.size() is_colored = n_chan == 3", "== \"VAE\": return BetaHLoss(beta=1, **kwargs_all) elif name == \"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"],", "---------- [1] Kim, Hyunjik, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018).", "# else: # # beta has to be increased by one for correct", "return vae loss without mutual information term # change latent dist to torch.tensor", "log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1, keepdim=False) - math.log(batch_size * data_size)).sum(dim=1)", "---------- [1] <NAME>, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\"", "al. \"Understanding disentangling in $\\beta$-VAE.\" arXiv preprint arXiv:1804.03599 (2018). 
\"\"\" def __init__(self, C_init=0.,", "not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove this when visualisation fixed", "else: raise ValueError(\"Unkown distribution: {}\".format(distribution)) loss = loss / batch_size if storer is", "math.log(batch_size * data_size) return logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log", "normal distribution with diagonal covariance and a unit normal distribution. Parameters ---------- mean", "self.device = device self.is_mutual_info = is_mutual_info self.is_mss = is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d", "log_var) each of shape : (batch_size, latent_dim). storer : dict Dictionary in which", "Higgins, Irina, et al. \"beta-vae: Learning basic visual concepts with a constrained variational", "torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log p(z) prior_params = torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz", "rec_loss + anneal_rec * (self.beta * kl_loss) if storer is not None: storer['loss'].append(loss.item())", "data_size) return logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and", "sample from q(z) (latent_dist) across the batch for each of the latent dimensions", "if not model.training: # don't backprop if evaluating return vae_loss # Run VAE", "and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" def __init__(self, device,", "weighted sampling. Parameters ---------- latent_dist : torch.Tensor Mean and logvar of the normal", "preprint arXiv:1802.05983 (2018). \"\"\" perm = torch.zeros_like(latent_sample) batch_size, dim_z = perm.size() for z", "of MSE. 
storer : dict Dictionary in which to store important variables for", "it doesn't penalize the same way (0.1,0.2) and (0.4,0.5), which might not be", "- logqz).mean() tc_loss = (logqz - logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals - logpz).mean() anneal_rec", "storer): storer = self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss =", "MSE, Laplace corresponds to L1. steps_anneal: nool, optional Number of annealing steps where", "when visualisation fixed tc_loss_vec = (logqz - logqz_prodmarginals) for i in range(latent_dist.size(1)): storer['kl_loss_'", "the following commented out code after viz is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info:", "variables for vizualisation. Returns ------- loss : torch.Tensor Per image cross entropy (i.e.", "else self.C_fin) loss = rec_loss + self.gamma * (kl_loss - C).abs() batch_size =", "in [1] Parameters ---------- beta : float, optional Weight of the kl divergence.", "alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else: raise ValueError(\"Uknown loss : {}\".format(name)) class", "correlation term. gamma : float Weight of the dimension-wise KL term. latent_dim: int", "the most commonly used. It has the issue that it doesn't penalize the", "log_var). Parameters ---------- latent_sample: torch.Tensor sample from the latent dimension using the reparameterisation", "disentangling in $\\beta$-VAE.\" arXiv preprint arXiv:1804.03599 (2018). 
\"\"\" def __init__(self, C_init=0., C_fin=5., C_n_interp=25000,", "= _kl_normal_loss(*latent_dist, storer) d_z = self.discriminator(latent_sample1) # clamping to 0 because TC cannot", "width = recon_data.size() is_colored = n_chan == 3 if distribution == \"bernoulli\": loss", "return vae loss vae_loss = rec_loss + anneal_rec * (kl_loss + self.gamma *", "2).to(self.device) logpz = log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if not self.is_mss: # minibatch", "beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else: raise ValueError(\"Uknown loss : {}\".format(name)) class BaseLoss(abc.ABC):", "beta = self.beta + 1 # kl_loss = _dimwise_kl_loss(*latent_dist, storer) # # vae_loss", "return BetaHLoss(beta=1, **kwargs_all) elif name == \"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all)", "dist to torch.tensor (could probably avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) #", "avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log p(z) prior_params =", "= self.beta + 1 # kl_loss = _dimwise_kl_loss(*latent_dist, storer) # # vae_loss =", "@abc.abstractmethod def __call__(self, data, recon_data, latent_dist, is_train, storer): \"\"\" Calculates loss for a", "* data_size) return logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z)", "latent_dim) storer : dict Dictionary in which to store important variables for vizualisation.", "def linear_annealing(init, fin, step, annealing_steps): \"\"\"Linear annealing of a parameter.\"\"\" if annealing_steps ==", "latent_sample: torch.Tensor sample from the latent dimension using the reparameterisation trick 
shape :", "= dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name == \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name ==", "is_mutual_info : bool True : includes the mutual information term in the loss", "is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self, data, model, optimizer,", "mutual information, total correlation and dim-wise kl rec_loss = _reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist)", "ValueError(\"Unkown distribution: {}\".format(distribution)) loss = loss / batch_size if storer is not None:", "logqz = torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\ - math.log(batch_size * data_size) return logqz, logqz_prodmarginals", "e.g. rec_dist`. References ---------- [1] Chen, <NAME>, et al. \"Isolating sources of disentanglement", "the KL divergence between a normal distribution with diagonal covariance and a unit", "* (kl_loss + self.gamma * tc_loss) else: # return vae loss without mutual", "int Dimension of the latent variable is_mss : bool Selects either minibatch stratified", "range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return total_kl def _permute_dims(latent_sample): \"\"\" Implementation of Algorithm 1", "latent_dist, is_train, storer, latent_sample=None): storer = self._pre_call(is_train, storer) batch_size = data.size(0) # change", "batch_size, return_matrix=False).view(batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling logqz, logqz_prodmarginals =", "anneal_rec * (self.alpha * mi_loss + self.beta * tc_loss + self.gamma * dw_kl_loss)", "device self.dataset_size = data_size self.beta = beta self.alpha = alpha self.gamma = gamma", "= torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log p(z) prior_params = torch.zeros(half_batch_size, latent_dist.size(1), 
2).to(self.device)", "optim from .discriminator import Discriminator from disvae.utils.math import log_density_normal, log_importance_weight_matrix # TO-DO: clean", "entropy (i.e. normalized per batch but not pixel and channel) \"\"\" batch_size, n_chan,", "penalize the same way (0.1,0.2) and (0.4,0.5), which might not be optimal. Gaussian", "arguments for `BaseLoss`, e.g. rec_dist`. \"\"\" def __init__(self, beta=4, **kwargs): super().__init__(**kwargs) self.beta =", "Hyunjik, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" def __init__(self,", "storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove this when visualisation fixed tc_loss_vec = (logqz", "self.dataset_size) # rec loss, mutual information, total correlation and dim-wise kl rec_loss =", "the Beta-VAE loss as in [1] Parameters ---------- C_init : float, optional Starting", "torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) + _logqz, dim=1,", "batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1,", "get_loss_f(name, kwargs_parse={}): \"\"\"Return the correct loss function given the argparse arguments.\"\"\" kwargs_all =", "Additional arguments for `BaseLoss`, e.g. rec_dist`. 
\"\"\" def __init__(self, beta=4, **kwargs): super().__init__(**kwargs) self.beta", "correlation and dim-wise kl rec_loss = _reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist) mi_loss = (logqz_condx", "not None: storer['recon_loss'].append(loss.item()) return loss def _kl_normal_loss(mean, logvar, storer=None): \"\"\" Calculates the KL", "loss = loss / batch_size if storer is not None: storer['recon_loss'].append(loss.item()) return loss", "using the reparameterisation trick shape : (batch_size, latent_dim). References ---------- [1] <NAME>, and", "<NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" perm = torch.zeros_like(latent_sample) batch_size,", "def __call__(self, data, recon_data, latent_dist, is_train, storer): \"\"\" Calculates loss for a batch", "q(z) and the log (product of marginals of q(z_j)) with minibatch weighted sampling.", "`BaseLoss`, e.g. rec_dist`. \"\"\" def __init__(self, beta=4, **kwargs): super().__init__(**kwargs) self.beta = beta def", "Parameters ---------- latent_sample: torch.Tensor sample from the latent dimension using the reparameterisation trick", "basic visual concepts with a constrained variational framework.\" (2016). kwargs: Additional arguments for", "\"laplace\"} Distribution of the likelihood on the each pixel. Implicitely defines the loss", "name == \"VAE\": return BetaHLoss(beta=1, **kwargs_all) elif name == \"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"],", "of the KL divergence term. kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. 
References", "class FactorKLoss(BaseLoss): \"\"\" Compute the Factor-VAE loss as per Algorithm 2 of [1]", "Run VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator Loss # Get second sample", "= recon_data.size() is_colored = n_chan == 3 if distribution == \"bernoulli\": loss =", "class BetaBLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1] Parameters ---------- C_init", ": torch.device beta : float, optional Weight of the TC loss term. `gamma`", "= rec_loss + anneal_rec * (kl_loss + self.gamma * tc_loss) else: # return", "vae loss without mutual information term # change latent dist to torch.tensor (could", "has the issue that it doesn't penalize the same way (0.1,0.2) and (0.4,0.5),", "remove this kl_loss term once viz is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist,", "self.is_mutual_info: # return vae loss vae_loss = rec_loss + anneal_rec * (kl_loss +", "(self.beta * kl_loss) if storer is not None: storer['loss'].append(loss.item()) return loss class BetaBLoss(BaseLoss):", "def get_loss_f(name, kwargs_parse={}): \"\"\"Return the correct loss function given the argparse arguments.\"\"\" kwargs_all", "q(z_j)) with minibatch stratified sampling. Parameters ---------- latent_dist : torch.Tensor Mean and logvar", "Starting annealed capacity C. C_fin : float, optional Final annealed capacity C. C_n_interp", "storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return loss def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z)", "latent dimension latent_kl = 0.5 * (-1 - logvar + mean.pow(2) + logvar.exp()).mean(dim=0)", "device def get_loss_f(name, kwargs_parse={}): \"\"\"Return the correct loss function given the argparse arguments.\"\"\"", "preprint arXiv:1804.03599 (2018). 
\"\"\" def __init__(self, C_init=0., C_fin=5., C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs) self.gamma", "{}\".format(distribution)) loss = loss / batch_size if storer is not None: storer['recon_loss'].append(loss.item()) return", ": float Weight of the dimension-wise KL term. latent_dim: int Dimension of the", "logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else: # minibatch stratified sampling _, logqz_prodmarginals =", "elif distribution == \"laplace\": # loss in [0,255] space but normalized by 255", "# # vae_loss = rec_loss + kl_loss + beta * tc_loss if storer", "matrix logqz_condx = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1) # calculate log p(z) prior_params =", "wrong. Laplace distribution corresponds to L1 solves partially the issue of MSE. storer", "of distribution. logvar : torch.Tensor Diagonal log variance of the normal distribution. Shape", "log p(z) prior_params = torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size,", "C_n_interp def __call__(self, data, recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train, storer) rec_loss", "the per image reconstruction loss for a batch of data. 
Parameters ---------- data", "if evaluating return vae_loss # Run VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator", "_reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) anneal_rec = (linear_annealing(0, 1, self.n_train_steps,", "# beta = self.beta # kl_loss = _kl_normal_loss(*latent_dist, storer) # else: # #", "+ _logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) + _logqz, dim=1, keepdim=False).sum(1)", "data. Parameters ---------- data : torch.Tensor Input data (e.g. batch of images). Shape", "\"\"\" Calculates the KL divergence between a normal distribution with diagonal covariance and", "BetaHLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1] Parameters ---------- beta :", "(kl_loss - C).abs() batch_size = data.size(0) if storer is not None: storer['loss'].append(loss.item()) return", "Number of training iterations for interpolating C. gamma : float, optional Weight of", "latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log p(z) prior_params = torch.zeros(half_batch_size, latent_dist.size(1),", "= record_loss_every self.rec_dist = rec_dist self.steps_anneal = steps_anneal @abc.abstractmethod def __call__(self, data, recon_data,", "binary cross entropy (bse) loss and is the most commonly used. It has", ".discriminator import Discriminator from disvae.utils.math import log_density_normal, log_importance_weight_matrix # TO-DO: clean data_size and", "# factor-vae split data into two batches. In the paper they sample 2", "(mean, log_var) each of shape : (batch_size, latent_dim). 
storer : dict Dictionary in", "batch mean of kl for each latent dimension latent_kl = 0.5 * (-1", "vae_loss # Run VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator Loss # Get", "kl_loss term once viz is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist, storer) d_z", "loss and is the most commonly used. It has the issue that it", "to recorsd the loss. rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"}, optional Reconstruction distribution istribution of", "Per image cross entropy (i.e. normalized per batch but not pixel and channel)", "latent_dim: int Dimension of the latent variable is_mss : bool Selects either minibatch", "cross entropy (i.e. normalized per batch but not pixel and channel) \"\"\" batch_size,", "nool, optional Number of annealing steps where gradually adding the regularisation. \"\"\" def", "be negative : TEST tc_loss = (F.logsigmoid(d_z) - F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec =", "BaseLoss(abc.ABC): \"\"\" Base class for losses. 
Parameters ---------- record_loss_every: int, optional Every how", "tc_loss + self.gamma * dw_kl_loss) if storer is not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item())", "_logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1, keepdim=False) - math.log(batch_size", "without mutual information term # change latent dist to torch.tensor (could probably avoid", "math import torch from torch.nn import functional as F from torch import optim", "* tc_loss + self.gamma * dw_kl_loss) if storer is not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item())", "out code after viz is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: # return vae", "delta = fin - init annealed = min(init + delta * step /", "and _log q(z) matrix logqz_condx = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1) # calculate log", "+ 1 # kl_loss = _dimwise_kl_loss(*latent_dist, storer) # # vae_loss = rec_loss +", "- F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if model.training else", "C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name == \"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not", "for i in range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return loss def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size):", "total_kl def _permute_dims(latent_sample): \"\"\" Implementation of Algorithm 1 in ref [1]. 
Randomly permutes", "corresponds to a binary cross entropy (bse), Gaussian corresponds to MSE, Laplace corresponds", "the loss. rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"}, optional Reconstruction distribution istribution of the likelihood", "the total correlation term. gamma : float Weight of the dimension-wise KL term.", "latent_dim, 2) latent_sample: torch.Tensor sample from the latent dimension using the reparameterisation trick", "storer=storer, distribution=self.rec_dist) mi_loss = (logqz_condx - logqz).mean() tc_loss = (logqz - logqz_prodmarginals).mean() dw_kl_loss", "float, optional Starting annealed capacity C. C_fin : float, optional Final annealed capacity", "issue of MSE. storer : dict Dictionary in which to store important variables", "(bse), Gaussian corresponds to MSE, Laplace corresponds to L1. steps_anneal: nool, optional Number", "torch.optim kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Kim, Hyunjik,", "Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self, data, model, optimizer, storer): storer =", "---------- data_size: int Size of the dataset alpha : float Weight of the", "anneal_rec * (self.beta * kl_loss) if storer is not None: storer['loss'].append(loss.item()) return loss", "Implementation of Algorithm 1 in ref [1]. Randomly permutes the sample from q(z)", "optimizer_d : torch.optim kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1]", "loss / batch_size if storer is not None: storer['recon_loss'].append(loss.item()) return loss def _kl_normal_loss(mean,", "`BaseLoss`, e.g. rec_dist`. References ---------- [1] Burgess, <NAME>., et al. \"Understanding disentangling in", "in Neural Information Processing Systems. 2018. \"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample,", "steps to recorsd the loss. 
rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"}, optional Reconstruction distribution istribution", ": torch.Tensor Per image cross entropy (i.e. normalized per batch but not pixel", "is included in `_kl_normal_loss` # beta = self.beta + 1 # kl_loss =", "latent_dim). data_size : int Number of data in the training set References :", "many steps to recorsd the loss. rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"}, optional Reconstruction distribution", "def __call__(self, data, recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train, storer) rec_loss =", "keepdim=False).sum(1) return logqz, logqz_prodmarginals def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None): \"\"\" Calculates the per", "self.n_train_steps += 1 if not is_train or self.n_train_steps % self.record_loss_every == 1: storer", "either minibatch stratified sampling (True) or minibatch weighted sampling (False) kwargs: Additional arguments", "dimension of distribution. logvar : torch.Tensor Diagonal log variance of the normal distribution.", "True : includes the mutual information term in the loss False : removes", "probably avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log q(z|x) and", "return_matrix=False).view(batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling logqz, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist,", "= (logqz - logqz_prodmarginals) for i in range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return loss", "storer : dict Dictionary in which to store important variables for vizualisation. 
Returns", "rec_loss = _reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist) # TODO: remove this kl_loss term once", "= device self.is_mutual_info = is_mutual_info self.is_mss = is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d =", "storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove this when visualisation fixed tc_loss_vec =", "Shape : (batch_size, n_chan, height, width). recon_data : torch.Tensor Reconstructed data. Shape :", "distribution. Shape (batch_size, latent_dim, 2) latent_sample: torch.Tensor sample from the latent dimension using", "storer): \"\"\" Calculates loss for a batch of data. Parameters ---------- data :", "logqz).mean() tc_loss = (logqz - logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals - logpz).mean() anneal_rec =", "data : torch.Tensor Input data (e.g. batch of images). Shape : (batch_size, n_chan,", "= latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz", "a binary cross entropy (bse) loss and is the most commonly used. It", "latent_sample1 = model(data1) rec_loss = _reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist) # TODO: remove this", "C. C_n_interp : float, optional Number of training iterations for interpolating C. gamma", "latent_dist, batch_size, return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1,", "\"\"\" Module containing all vae losses. 
\"\"\" import abc import math import torch", "model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm) # Calculate total correlation loss d_tc_loss", "rec_dist`. References ---------- [1] Kim, Hyunjik, and <NAME>. \"Disentangling by factorising.\" arXiv preprint", "q(z|x) and _log q(z) matrix logqz_condx = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1) # calculate", "backprop if evaluating return vae_loss # Run VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() #", "batch but not pixel and channel) \"\"\" batch_size, n_chan, height, width = recon_data.size()", "included in `_kl_normal_loss` # beta = self.beta + 1 # kl_loss = _dimwise_kl_loss(*latent_dist,", "of latent distribution latent_sample2 = model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm) #", "distribution latent_sample2 = model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm) # Calculate total", "return_matrix=False).view(half_batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling _, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist,", "\"\"\" Compute the Beta-VAE loss as in [1] Parameters ---------- beta : float,", "= (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) # total loss loss", "# calculate log p(z) prior_params = torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample, prior_params,", "storer) # # vae_loss = rec_loss + kl_loss + beta * tc_loss if", "minibatch stratified sampling _, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma = self.gamma +", "# beta has to be increased by one for correct comparaison # #", "== \"betaB\": return 
BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name == \"factor\": return", "class BaseLoss(abc.ABC): \"\"\" Base class for losses. Parameters ---------- record_loss_every: int, optional Every", "training set References : [1] Chen, <NAME>, et al. \"Isolating sources of disentanglement", "weighted sampling or minibatch stratified sampling according to [1] Parameters ---------- data_size: int", "https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist, storer) d_z = self.discriminator(latent_sample1) # clamping to 0 because", "sample 2 batches batch_size = data.size(dim=0) half_batch_size = batch_size // 2 data =", "anything for L1 loss = F.l1_loss(recon_data, data, reduction=\"sum\") else: raise ValueError(\"Unkown distribution: {}\".format(distribution))", "dw_kl_loss = (logqz_prodmarginals - logpz).mean() vae_loss = rec_loss + anneal_rec * (gamma *", "torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log q(z|x) and _log q(z) matrix logqz_condx =", "References : [1] Chen, <NAME>, et al. \"Isolating sources of disentanglement in variational", "to a binary cross entropy (bse) loss and is the most commonly used.", "for correct comparaison # # as the TC term is included in `_kl_normal_loss`", "increased by one for correct comparaison # # as the TC term is", "annealing_steps == 0: return fin assert fin > init delta = fin -", "each of the latent dimensions (mean and log_var). 
Parameters ---------- latent_sample: torch.Tensor sample", "Compute the decomposed KL loss with either minibatch weighted sampling or minibatch stratified", "**kwargs_all) elif name == \"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"],", "of the normal distribution. Shape (batch_size, latent_dim) where D is dimension of distribution.", "log variance of the normal distribution. Shape (batch_size, latent_dim) storer : dict Dictionary", "optimal. Gaussian distribution corresponds to MSE, and is sometimes used, but hard to", "is_train, storer): \"\"\" Calculates loss for a batch of data. Parameters ---------- data", "vae_loss = rec_loss + anneal_rec * (gamma * tc_loss + dw_kl_loss) # if", "vae_loss = rec_loss + kl_loss + beta * tc_loss if storer is not", "Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Chen, <NAME>, et al.", "for each latent dimension latent_kl = 0.5 * (-1 - logvar + mean.pow(2)", "doing anything for L1 loss = F.l1_loss(recon_data, data, reduction=\"sum\") else: raise ValueError(\"Unkown distribution:", "= beta self.alpha = alpha self.gamma = gamma self.is_mss = is_mss # minibatch", "q(z) (latent_dist) across the batch for each of the latent dimensions (mean and", "negative : TEST tc_loss = (F.logsigmoid(d_z) - F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0,", "F.logsigmoid(1 - d_z_perm))).mean() # Run discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step() if storer is", "record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps = 0 self.record_loss_every = record_loss_every self.rec_dist = rec_dist self.steps_anneal", "variational framework.\" (2016). kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. 
\"\"\" def __init__(self,", "store important variables for vizualisation. Returns ------- loss : torch.Tensor Per image cross", "return perm def linear_annealing(init, fin, step, annealing_steps): \"\"\"Linear annealing of a parameter.\"\"\" if", "dsprites: 6, celeba: 15 self.device = device self.dataset_size = data_size self.beta = beta", "Advances in Neural Information Processing Systems. 2018. \"\"\" def __init__(self, device, data_size, alpha=1.,", "self.is_mutual_info = is_mutual_info self.is_mss = is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs)", "is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else: raise ValueError(\"Uknown loss : {}\".format(name)) class BaseLoss(abc.ABC): \"\"\" Base", "= self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer)", "losses. \"\"\" import abc import math import torch from torch.nn import functional as", ": dict Dictionary in which to store important variables for vizualisation. Returns -------", "Dictionary in which to store important variables for vizualisation. Returns ------- loss :", "minibatch weighted sampling (False) kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ----------", "device : torch.device beta : float, optional Weight of the TC loss term.", ": torch.Tensor Diagonal log variance of the normal distribution. Shape (batch_size, latent_dim) storer", "---------- record_loss_every: int, optional Every how many steps to recorsd the loss. 
rec_dist:", "(linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp) if is_train else self.C_fin) loss = rec_loss + self.gamma", "= 0.5 * (-1 - logvar + mean.pow(2) + logvar.exp()).mean(dim=0) total_kl = latent_kl.sum()", "log q(z) and the log (product of marginals of q(z_j)) with minibatch stratified", "in ref [1]. Randomly permutes the sample from q(z) (latent_dist) across the batch", "latent distribution latent_sample2 = model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm) # Calculate", "second sample of latent distribution latent_sample2 = model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach() d_z_perm =", "Systems. 2018. \"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logiw_matrix", "distribution. Shape (batch_size, latent_dim) storer : dict Dictionary in which to store important", "fin - init annealed = min(init + delta * step / annealing_steps, fin)", "(False) kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Chen, <NAME>,", "latent_kl = 0.5 * (-1 - logvar + mean.pow(2) + logvar.exp()).mean(dim=0) total_kl =", "to store important variables for vizualisation. \"\"\" latent_dim = mean.size(1) # batch mean", "Mean and logvar of the normal distribution. Shape (batch_size, latent_dim, 2) latent_sample: torch.Tensor", "two batches. In the paper they sample 2 batches batch_size = data.size(dim=0) half_batch_size", ": float, optional Final annealed capacity C. 
C_n_interp : float, optional Number of", "latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1, keepdim=False) -", "sampling logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) # rec loss, mutual information, total", "self.C_fin) loss = rec_loss + self.gamma * (kl_loss - C).abs() batch_size = data.size(0)", "storer): if is_train: self.n_train_steps += 1 if not is_train or self.n_train_steps % self.record_loss_every", "log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling _,", "distribution == \"gaussian\": # loss in [0,255] space but normalized by 255 to", "logvar, storer=None): \"\"\" Calculates the KL divergence between a normal distribution with diagonal", "dimensions (mean and log_var). Parameters ---------- latent_sample: torch.Tensor sample from the latent dimension", "torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) + _logqz, dim=1, keepdim=False).sum(1) return logqz, logqz_prodmarginals def _reconstruction_loss(data, recon_data,", "distribution=self.rec_dist) # TODO: remove this kl_loss term once viz is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863", "self.beta # kl_loss = _kl_normal_loss(*latent_dist, storer) # else: # # beta has to", "1 if not is_train or self.n_train_steps % self.record_loss_every == 1: storer = storer", "how many steps to recorsd the loss. rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"}, optional Reconstruction", "(i.e. normalized per batch but not pixel and channel) \"\"\" batch_size, n_chan, height,", "dict Dictionary in which to store important variables for vizualisation. \"\"\" latent_dim =", "(batch_size, n_chan, height, width). 
latent_dist : tuple of torch.tensor sufficient statistics of the", "logpz).mean() vae_loss = rec_loss + anneal_rec * (gamma * tc_loss + dw_kl_loss) #", "pi = torch.randperm(batch_size).to(latent_sample.device) perm[:, z] = latent_sample[pi, z] return perm def linear_annealing(init, fin,", "not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not model.training: # don't backprop if evaluating return", "if storer is not None: storer['loss'].append(loss.item()) return loss class FactorKLoss(BaseLoss): \"\"\" Compute the", "storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class BatchTCLoss(BaseLoss): \"\"\" Compute the decomposed KL loss with either", "> init delta = fin - init annealed = min(init + delta *", "distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) C = (linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp) if is_train", "255 to not be too big loss = F.mse_loss(recon_data * 255, data *", "self.is_mutual_info: # beta = self.beta # kl_loss = _kl_normal_loss(*latent_dist, storer) # else: #", "[1] <NAME>, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" perm", "float, optional Number of training iterations for interpolating C. gamma : float, optional", "MSE. storer : dict Dictionary in which to store important variables for vizualisation.", "dict Dictionary in which to store important variables for vizualisation. Returns ------- loss", "(2018). \"\"\" def __init__(self, device, data_size, gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4,", "to not be too big loss = F.mse_loss(recon_data * 255, data * 255,", "- logpz).mean() vae_loss = rec_loss + anneal_rec * (gamma * tc_loss + dw_kl_loss)", "sufficient statistics of the latent dimension. E.g. 
for gaussian (mean, log_var) each of", "data.size(0) if storer is not None: storer['loss'].append(loss.item()) return loss class FactorKLoss(BaseLoss): \"\"\" Compute", "\"\"\" Calculates loss for a batch of data. Parameters ---------- data : torch.Tensor", "this when visualisation fixed tc_loss_vec = (logqz - logqz_prodmarginals) for i in range(latent_dist.size(1)):", "+ logvar.exp()).mean(dim=0) total_kl = latent_kl.sum() if storer is not None: storer['kl_loss'].append(total_kl.item()) for i", "= data[1] # Factor VAE Loss recon_batch, latent_dist, latent_sample1 = model(data1) rec_loss =", "the each pixel. Implicitely defines the loss Bernoulli corresponds to a binary cross", "# don't backprop if evaluating return vae_loss # Run VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True)", "might not be optimal. Gaussian distribution corresponds to MSE, and is sometimes used,", "data, reduction=\"sum\") elif distribution == \"gaussian\": # loss in [0,255] space but normalized", "and is sometimes used, but hard to train ecause it ends up focusing", "# TODO: remove this kl_loss term once viz is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss", "storer) C = (linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp) if is_train else self.C_fin) loss =", "self.gamma * tc_loss) else: # return vae loss without mutual information term #", "---------- device : torch.device beta : float, optional Weight of the TC loss", "likelihood on the each pixel. Implicitely defines the reconstruction loss. Bernoulli corresponds to", ": torch.Tensor Mean of the normal distribution. Shape (batch_size, latent_dim) where D is", "kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name == \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name", "arguments for `BaseLoss`, e.g. rec_dist`. 
References ---------- [1] Kim, Hyunjik, and <NAME>. \"Disentangling", "image cross entropy (i.e. normalized per batch but not pixel and channel) \"\"\"", "a normal distribution with diagonal covariance and a unit normal distribution. Parameters ----------", "latent_dist : tuple of torch.tensor sufficient statistics of the latent dimension. E.g. for", "beta : float, optional Weight of the kl divergence. References: [1] Higgins, Irina,", "mutual information term # change latent dist to torch.tensor (could probably avoid this)", "annealing_steps): \"\"\"Linear annealing of a parameter.\"\"\" if annealing_steps == 0: return fin assert", "storer) # else: # # beta has to be increased by one for", "self.alpha = alpha self.gamma = gamma self.is_mss = is_mss # minibatch stratified sampling", "mean.size(1) # batch mean of kl for each latent dimension latent_kl = 0.5", "torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height, width). distribution : {\"bernoulli\", \"gaussian\",", "[1]. Randomly permutes the sample from q(z) (latent_dist) across the batch for each", "defines the reconstruction loss. Bernoulli corresponds to a binary cross entropy (bse), Gaussian", "the Factor-VAE loss as per Algorithm 2 of [1] Parameters ---------- device :", "# change latent dist to torch.tensor (could probably avoid this) latent_dist = torch.stack((latent_dist[0],", "al. \"Isolating sources of disentanglement in variational autoencoders.\" Advances in Neural Information Processing", "of kl for each latent dimension latent_kl = 0.5 * (-1 - logvar", "the log (product of marginals of q(z_j)) with minibatch stratified sampling. Parameters ----------", "includes the mutual information term in the loss False : removes mutual information", "and the log (product of marginals of q(z_j)) with minibatch weighted sampling. Parameters", "storer) # factor-vae split data into two batches. In the paper they sample", "arguments for `BaseLoss`, e.g. rec_dist`. 
References ---------- [1] Chen, <NAME>, et al. \"Isolating", "or self.n_train_steps % self.record_loss_every == 1: storer = storer else: storer = None", "the normal distribution. Shape (batch_size, latent_dim, 2) latent_sample: torch.Tensor sample from the latent", "return_matrix=False).sum(dim=1) # calculate log p(z) prior_params = torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample,", "minibatch stratified sampling logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) # rec loss, mutual", "distribution. Shape (batch_size, latent_dim) where D is dimension of distribution. logvar : torch.Tensor", "+ mean.pow(2) + logvar.exp()).mean(dim=0) total_kl = latent_kl.sum() if storer is not None: storer['kl_loss'].append(total_kl.item())", "it ends up focusing only a few pixels that are very wrong. Laplace", "(True) or minibatch weighted sampling (False) kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`.", "loss as in [1] Parameters ---------- C_init : float, optional Starting annealed capacity", "+ 1 dw_kl_loss = (logqz_prodmarginals - logpz).mean() vae_loss = rec_loss + anneal_rec *", "False : removes mutual information discriminator : disvae.discriminator.Discriminator optimizer_d : torch.optim kwargs: Additional", "Mean of the normal distribution. Shape (batch_size, latent_dim) where D is dimension of", "---------- latent_dist : torch.Tensor Mean and logvar of the normal distribution. Shape (batch_size,", "storer['recon_loss'].append(loss.item()) return loss def _kl_normal_loss(mean, logvar, storer=None): \"\"\" Calculates the KL divergence between", "Learning basic visual concepts with a constrained variational framework.\" (2016). 
kwargs: Additional arguments", ": includes the mutual information term in the loss False : removes mutual", "# TODO replace this code with the following commented out code after viz", "the loss Bernoulli corresponds to a binary cross entropy (bse) loss and is", "TO-DO: clean data_size and device def get_loss_f(name, kwargs_parse={}): \"\"\"Return the correct loss function", "FactorKLoss(BaseLoss): \"\"\" Compute the Factor-VAE loss as per Algorithm 2 of [1] Parameters", "torch.tensor (could probably avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log", "term. latent_dim: int Dimension of the latent variable is_mss : bool Selects either", "optimizer, storer): storer = self._pre_call(model.training, storer) # factor-vae split data into two batches.", "torch.tensor sufficient statistics of the latent dimension. E.g. for gaussian (mean, log_var) each", "al. \"beta-vae: Learning basic visual concepts with a constrained variational framework.\" (2016). kwargs:", "https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: # return vae loss vae_loss = rec_loss + anneal_rec *", "# Calculate total correlation loss d_tc_loss = - (0.5 * (F.logsigmoid(d_z) + F.logsigmoid(1", "beta=6., gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs) # beta values: dsprites: 6, celeba: 15 self.device", "storer : dict Dictionary in which to store important variables for vizualisation. \"\"\"", "term. gamma : float Weight of the dimension-wise KL term. latent_dim: int Dimension", "\"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" perm = torch.zeros_like(latent_sample) batch_size, dim_z", "big but # multiply by 255 and divide 255, is the same as", "------- loss : torch.Tensor Per image cross entropy (i.e. normalized per batch but", "of [1] Parameters ---------- device : torch.device beta : float, optional Weight of", "arXiv:1804.03599 (2018). 
\"\"\" def __init__(self, C_init=0., C_fin=5., C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs) self.gamma =", "dw_kl_loss) # if self.is_mutual_info: # beta = self.beta # kl_loss = _kl_normal_loss(*latent_dist, storer)", "Parameters ---------- data : torch.Tensor Input data (e.g. batch of images). Shape :", "class for losses. Parameters ---------- record_loss_every: int, optional Every how many steps to", "sources of disentanglement in variational autoencoders.\" Advances in Neural Information Processing Systems. 2018.", "None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not model.training: # don't backprop if evaluating return vae_loss", "loss : {}\".format(name)) class BaseLoss(abc.ABC): \"\"\" Base class for losses. Parameters ---------- record_loss_every:", "the regularisation. \"\"\" def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps = 0 self.record_loss_every =", "* data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\ - math.log(batch_size * data_size) return", "disvae.utils.math import log_density_normal, log_importance_weight_matrix # TO-DO: clean data_size and device def get_loss_f(name, kwargs_parse={}):", "range(latent_dist.size(1)): storer['kl_loss_' + str(i)].append(tc_loss_vec[i].item()) return loss def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log", "but normalized by 255 to not be too big loss = F.mse_loss(recon_data *", "Gaussian corresponds to MSE, Laplace corresponds to L1. steps_anneal: nool, optional Number of", "* (kl_loss - C).abs() batch_size = data.size(0) if storer is not None: storer['loss'].append(loss.item())", "disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs) self.gamma = gamma self.data_size =", "2018. 
\"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logiw_matrix =", "dim_z = perm.size() for z in range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device) perm[:, z] =", "loss d_tc_loss = - (0.5 * (F.logsigmoid(d_z) + F.logsigmoid(1 - d_z_perm))).mean() # Run", "Final annealed capacity C. C_n_interp : float, optional Number of training iterations for", ": (batch_size, latent_dim). data_size : int Number of data in the training set", "kl for each latent dimension latent_kl = 0.5 * (-1 - logvar +", "self.rec_dist = rec_dist self.steps_anneal = steps_anneal @abc.abstractmethod def __call__(self, data, recon_data, latent_dist, is_train,", "annealed capacity C. C_n_interp : float, optional Number of training iterations for interpolating", "ecause it ends up focusing only a few pixels that are very wrong.", "2018. \"\"\" def __init__(self, device, data_size, alpha=1., beta=6., gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs) #", "but # multiply by 255 and divide 255, is the same as not", "self.device = device self.dataset_size = data_size self.beta = beta self.alpha = alpha self.gamma", "Factor-VAE loss as per Algorithm 2 of [1] Parameters ---------- device : torch.device", "anneal_rec * (gamma * tc_loss + dw_kl_loss) # if self.is_mutual_info: # beta =", "total correlation and dim-wise kl rec_loss = _reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist) mi_loss =", "None: storer['recon_loss'].append(loss.item()) return loss def _kl_normal_loss(mean, logvar, storer=None): \"\"\" Calculates the KL divergence", "kwargs_parse[\"no_mss\"], **kwargs_all) elif name == \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not", "= rec_loss + anneal_rec * (self.alpha * mi_loss + self.beta * tc_loss +", "of 
data in the training set References : [1] Chen, <NAME>, et al.", "---------- [1] Burgess, <NAME>., et al. \"Understanding disentangling in $\\beta$-VAE.\" arXiv preprint arXiv:1804.03599", "**kwargs_all) else: raise ValueError(\"Uknown loss : {}\".format(name)) class BaseLoss(abc.ABC): \"\"\" Base class for", "image reconstruction loss for a batch of data. Parameters ---------- data : torch.Tensor", "super().__init__(**kwargs) self.beta = beta def __call__(self, data, recon_data, latent_dist, is_train, storer): storer =", "self.beta = beta def __call__(self, data, recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train,", "space but normalized by 255 to not be too big but # multiply", "dataset alpha : float Weight of the mutual information term. beta : float", "= model(data1) rec_loss = _reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist) # TODO: remove this kl_loss", "= data_size self.device = device self.is_mutual_info = is_mutual_info self.is_mss = is_mss self.discriminator =", "data_size, alpha=1., beta=6., gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs) # beta values: dsprites: 6, celeba:", "TODO Remove this when visualisation fixed tc_loss_vec = (logqz - logqz_prodmarginals) for i", "logvar : torch.Tensor Diagonal log variance of the normal distribution. Shape (batch_size, latent_dim)", "return storer class BetaHLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1] Parameters", ": torch.optim kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Kim,", "images). Shape : (batch_size, n_chan, height, width). recon_data : torch.Tensor Reconstructed data. Shape", "Information Processing Systems. 2018. 
\"\"\" def __init__(self, device, data_size, alpha=1., beta=6., gamma=1., is_mss=False,", "\"gaussian\": # loss in [0,255] space but normalized by 255 to not be", "gamma self.is_mss = is_mss # minibatch stratified sampling def __call__(self, data, recon_batch, latent_dist,", "is_colored = n_chan == 3 if distribution == \"bernoulli\": loss = F.binary_cross_entropy(recon_data, data,", "the sample from q(z) (latent_dist) across the batch for each of the latent", "tc_loss = (logqz - logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals - logpz).mean() anneal_rec = (linear_annealing(0,", "distribution istribution of the likelihood on the each pixel. Implicitely defines the reconstruction", "== \"laplace\": # loss in [0,255] space but normalized by 255 to not", "latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1) if not self.is_mss: #", "# Discriminator Loss # Get second sample of latent distribution latent_sample2 = model.sample_latent(data2)", "after viz is fixed # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: # return vae loss vae_loss", "\"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz,", "anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) # total loss", "optional Final annealed capacity C. 
C_n_interp : float, optional Number of training iterations", "= _kl_normal_loss(*latent_dist, storer) C = (linear_annealing(self.C_init, self.C_fin, self.n_train_steps, self.C_n_interp) if is_train else self.C_fin)", "def __init__(self, beta=4, **kwargs): super().__init__(**kwargs) self.beta = beta def __call__(self, data, recon_data, latent_dist,", "int Size of the dataset alpha : float Weight of the mutual information", "storer) batch_size = data.size(0) # change latent dist to torch.tensor (could probably avoid", "e.g. rec_dist`. \"\"\" def __init__(self, beta=4, **kwargs): super().__init__(**kwargs) self.beta = beta def __call__(self,", "and channel) \"\"\" batch_size, n_chan, height, width = recon_data.size() is_colored = n_chan ==", "is not None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class BatchTCLoss(BaseLoss): \"\"\" Compute the decomposed KL", "space but normalized by 255 to not be too big loss = F.mse_loss(recon_data", "gradually adding the regularisation. 
\"\"\" def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps = 0", "KL loss with either minibatch weighted sampling or minibatch stratified sampling according to", "minibatch stratified sampling def __call__(self, data, recon_batch, latent_dist, is_train, storer, latent_sample=None): storer =", "model, optimizer, storer): storer = self._pre_call(model.training, storer) # factor-vae split data into two", "else: # # beta has to be increased by one for correct comparaison", "# beta = self.beta + 1 # kl_loss = _dimwise_kl_loss(*latent_dist, storer) # #", "self.dataset_size) else: # minibatch stratified sampling logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) #", "(F.logsigmoid(d_z) - F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if model.training", "that it doesn't penalize the same way (0.1,0.2) and (0.4,0.5), which might not", "Remove this when visualisation fixed tc_loss_vec = (logqz - logqz_prodmarginals) for i in", "is the most commonly used. It has the issue that it doesn't penalize", "storer else: storer = None return storer class BetaHLoss(BaseLoss): \"\"\" Compute the Beta-VAE", "Reconstructed data. Shape : (batch_size, n_chan, height, width). latent_dist : tuple of torch.tensor", "loss def _kl_normal_loss(mean, logvar, storer=None): \"\"\" Calculates the KL divergence between a normal", "gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else: raise ValueError(\"Uknown loss : {}\".format(name)) class BaseLoss(abc.ABC): \"\"\"", "loss : torch.Tensor Per image cross entropy (i.e. 
normalized per batch but not", "Dimension of the latent variable is_mss : bool Selects either minibatch stratified sampling", "distribution: {}\".format(distribution)) loss = loss / batch_size if storer is not None: storer['recon_loss'].append(loss.item())", "rec loss, mutual information, total correlation and dim-wise kl rec_loss = _reconstruction_loss(data, recon_batch,", "\"\"\" latent_dim = mean.size(1) # batch mean of kl for each latent dimension", "correct loss function given the argparse arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name", "Size of the dataset alpha : float Weight of the mutual information term.", "distribution : {\"bernoulli\", \"gaussian\", \"laplace\"} Distribution of the likelihood on the each pixel.", "corresponds to L1 solves partially the issue of MSE. storer : dict Dictionary", "doesn't penalize the same way (0.1,0.2) and (0.4,0.5), which might not be optimal.", "perm.size() for z in range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device) perm[:, z] = latent_sample[pi, z]", "latent_dist[1]), dim=2) # calculate log q(z|x) and _log q(z) matrix logqz_condx = log_density_normal(latent_sample,", "# calculate log q(z|x) and _log q(z) matrix logqz_condx = log_density_normal(latent_sample, latent_dist, batch_size,", "the argparse arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"], steps_anneal=kwargs_parse[\"reg_anneal\"]) if name == \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"],", "divergence term. kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. 
References ---------- [1] Burgess,", "gamma self.data_size = data_size self.device = device self.is_mutual_info = is_mutual_info self.is_mss = is_mss", "recon_data.size() is_colored = n_chan == 3 if distribution == \"bernoulli\": loss = F.binary_cross_entropy(recon_data,", "batch_size, dim_z = perm.size() for z in range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device) perm[:, z]", "Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Burgess, <NAME>., et al.", ": float, optional Number of training iterations for interpolating C. gamma : float,", "the same as not doing anything for L1 loss = F.l1_loss(recon_data, data, reduction=\"sum\")", "normal distribution. Parameters ---------- mean : torch.Tensor Mean of the normal distribution. Shape", "range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device) perm[:, z] = latent_sample[pi, z] return perm def linear_annealing(init,", "dimension. E.g. for gaussian (mean, log_var) each of shape : (batch_size, latent_dim). storer", "batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device)", "or minibatch weighted sampling (False) kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References", "latent_dim). storer : dict Dictionary in which to store important variables for vizualisation.", "of the latent dimension. E.g. for gaussian (mean, log_var) each of shape :", "log p(z) prior_params = torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size,", "stratified sampling (True) or minibatch weighted sampling (False) kwargs: Additional arguments for `BaseLoss`,", "storer): storer = self._pre_call(model.training, storer) # factor-vae split data into two batches. 
In", "MSE, and is sometimes used, but hard to train ecause it ends up", "_pre_call(self, is_train, storer): if is_train: self.n_train_steps += 1 if not is_train or self.n_train_steps", "= C_fin self.C_n_interp = C_n_interp def __call__(self, data, recon_data, latent_dist, is_train, storer): storer", "(0.5 * (F.logsigmoid(d_z) + F.logsigmoid(1 - d_z_perm))).mean() # Run discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward()", "---------- C_init : float, optional Starting annealed capacity C. C_fin : float, optional", "= _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) # rec loss, mutual information, total correlation and dim-wise", "latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz =", "F.l1_loss(recon_data, data, reduction=\"sum\") else: raise ValueError(\"Unkown distribution: {}\".format(distribution)) loss = loss / batch_size", "the reparameterisation trick shape : (batch_size, latent_dim). data_size : int Number of data", "variational autoencoders.\" Advances in Neural Information Processing Systems. 2018. 
\"\"\" def __init__(self, device,", "gamma=30., **kwargs): super().__init__(**kwargs) self.gamma = gamma self.C_init = C_init self.C_fin = C_fin self.C_n_interp", "(linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) loss = rec_loss + anneal_rec", "betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs) self.gamma = gamma self.data_size = data_size self.device = device", "loss False : removes mutual information discriminator : disvae.discriminator.Discriminator optimizer_d : torch.optim kwargs:", "storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return total_kl def _permute_dims(latent_sample): \"\"\" Implementation of Algorithm 1 in", "because TC cannot be negative : TEST tc_loss = (F.logsigmoid(d_z) - F.logsigmoid(1 -", "logqz, logqz_prodmarginals def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None): \"\"\" Calculates the per image reconstruction", "elif name == \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all)", "(linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if model.training else 1) # TODO replace this code", "Parameters ---------- mean : torch.Tensor Mean of the normal distribution. Shape (batch_size, latent_dim)", "Neural Information Processing Systems. 2018. 
\"\"\" batch_size = latent_dist.size(0) _logqz = log_density_normal(latent_sample, latent_dist,", "not is_train or self.n_train_steps % self.record_loss_every == 1: storer = storer else: storer", "storer = storer else: storer = None return storer class BetaHLoss(BaseLoss): \"\"\" Compute", "if is_train else self.C_fin) loss = rec_loss + self.gamma * (kl_loss - C).abs()", "steps_anneal=kwargs_parse[\"reg_anneal\"]) if name == \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name == \"VAE\": return", "from torch import optim from .discriminator import Discriminator from disvae.utils.math import log_density_normal, log_importance_weight_matrix", "latent_dist, batch_size, return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1, keepdim=False) - math.log(batch_size * data_size)).sum(dim=1) logqz", "self.n_train_steps, self.C_n_interp) if is_train else self.C_fin) loss = rec_loss + self.gamma * (kl_loss", "the Beta-VAE loss as in [1] Parameters ---------- beta : float, optional Weight", "data_size self.device = device self.is_mutual_info = is_mutual_info self.is_mss = is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device)", "torch.zeros(half_batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if not self.is_mss:", "logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) # rec loss, mutual information, total correlation and", "self.beta * tc_loss + self.gamma * dw_kl_loss) if storer is not None: storer['loss'].append(loss.item())", "visualisation fixed tc_loss_vec = (logqz - logqz_prodmarginals) for i in range(latent_dist.size(1)): storer['kl_loss_' +", "init delta = fin - init annealed = min(init + delta * step", "a parameter.\"\"\" if annealing_steps == 0: return fin assert fin > init delta", "KL divergence term. 
kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1]", "kl_loss = _dimwise_kl_loss(*latent_dist, storer) # # vae_loss = rec_loss + kl_loss + beta", "kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Kim, Hyunjik, and", "distribution=\"bernoulli\", storer=None): \"\"\" Calculates the per image reconstruction loss for a batch of", ": tuple of torch.tensor sufficient statistics of the latent dimension. E.g. for gaussian", "= alpha self.gamma = gamma self.is_mss = is_mss # minibatch stratified sampling def", "F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif distribution == \"gaussian\": # loss in [0,255] space but", "data, recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data, recon_data,", "+ beta * tc_loss if storer is not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not", "beta : float Weight of the total correlation term. gamma : float Weight", "the normal distribution. Shape (batch_size, latent_dim) where D is dimension of distribution. logvar", "__call__(self, data, recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data,", "focusing only a few pixels that are very wrong. Laplace distribution corresponds to", "divergence. References: [1] Higgins, Irina, et al. 
\"beta-vae: Learning basic visual concepts with", "not None: storer['loss'].append(loss.item()) return loss class BetaBLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as", "device self.is_mutual_info = is_mutual_info self.is_mss = is_mss self.discriminator = Discriminator(**disc_kwargs).to(self.device) self.optimizer_d = optim.Adam(self.discriminator.parameters(),", "= log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1) # calculate log p(z) prior_params = torch.zeros(batch_size, latent_dist.size(1),", "== \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else: raise", "if model.training else 1) # TODO replace this code with the following commented", "self.data_size = data_size self.device = device self.is_mutual_info = is_mutual_info self.is_mss = is_mss self.discriminator", "for a batch of data. Parameters ---------- data : torch.Tensor Input data (e.g.", "_reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist) mi_loss = (logqz_condx - logqz).mean() tc_loss = (logqz -", "divergence between a normal distribution with diagonal covariance and a unit normal distribution.", "the latent dimension using the reparameterisation trick shape : (batch_size, latent_dim). data_size :", "storer is not None: storer['recon_loss'].append(loss.item()) return loss def _kl_normal_loss(mean, logvar, storer=None): \"\"\" Calculates", "Number of annealing steps where gradually adding the regularisation. 
\"\"\" def __init__(self, record_loss_every=50,", "self._pre_call(is_train, storer) rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) C", "= _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) C = (linear_annealing(self.C_init, self.C_fin,", "else: # return vae loss without mutual information term # change latent dist", "Returns ------- loss : torch.Tensor Per image cross entropy (i.e. normalized per batch", "logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) + _logqz, dim=1, keepdim=False).sum(1) return logqz, logqz_prodmarginals def", "the latent dimensions (mean and log_var). Parameters ---------- latent_sample: torch.Tensor sample from the", "into two batches. In the paper they sample 2 batches batch_size = data.size(dim=0)", "is not None: storer['loss'].append(loss.item()) storer['mi_loss'].append(mi_loss.item()) storer['tc_loss'].append(tc_loss.item()) storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove this when visualisation", "== \"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif name", "storer class BetaHLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1] Parameters ----------", "VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator Loss # Get second sample of", "is_train, storer, latent_sample=None): storer = self._pre_call(is_train, storer) batch_size = data.size(0) # change latent", "L1. 
steps_anneal: nool, optional Number of annealing steps where gradually adding the regularisation.", "according to [1] Parameters ---------- data_size: int Size of the dataset alpha :", "in variational autoencoders.\" Advances in Neural Information Processing Systems. 2018. \"\"\" def __init__(self,", "(product of marginals of q(z_j)) with minibatch weighted sampling. Parameters ---------- latent_dist :", "n_chan, height, width). latent_dist : tuple of torch.tensor sufficient statistics of the latent", "__call__(self, data, recon_data, latent_dist, is_train, storer): \"\"\" Calculates loss for a batch of", "kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. \"\"\" def __init__(self, beta=4, **kwargs): super().__init__(**kwargs)", "optional Every how many steps to recorsd the loss. rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"},", "if name == \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name == \"VAE\": return BetaHLoss(beta=1,", "def _pre_call(self, is_train, storer): if is_train: self.n_train_steps += 1 if not is_train or", "loss class BetaBLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1] Parameters ----------", "return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals", "= rec_loss + anneal_rec * (gamma * tc_loss + dw_kl_loss) # if self.is_mutual_info:", "in the loss False : removes mutual information discriminator : disvae.discriminator.Discriminator optimizer_d :", "# https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 if self.is_mutual_info: # return vae loss vae_loss = rec_loss + anneal_rec", "the likelihood on the each pixel. 
Implicitely defines the loss Bernoulli corresponds to", "device, data_size, gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs)", "storer = self._pre_call(is_train, storer) batch_size = data.size(0) # change latent dist to torch.tensor", "corresponds to MSE, Laplace corresponds to L1. steps_anneal: nool, optional Number of annealing", "+ kl_loss + beta * tc_loss if storer is not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item())", "gamma : float, optional Weight of the KL divergence term. kwargs: Additional arguments", "loss term. `gamma` in the paper. is_mutual_info : bool True : includes the", "model(data1) rec_loss = _reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist) # TODO: remove this kl_loss term", "self.data_size) gamma = self.gamma + 1 dw_kl_loss = (logqz_prodmarginals - logpz).mean() vae_loss =", "the reconstruction loss. Bernoulli corresponds to a binary cross entropy (bse), Gaussian corresponds", "of training iterations for interpolating C. gamma : float, optional Weight of the", "either minibatch weighted sampling or minibatch stratified sampling according to [1] Parameters ----------", "# if self.is_mutual_info: # beta = self.beta # kl_loss = _kl_normal_loss(*latent_dist, storer) #", "vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator Loss # Get second sample of latent distribution latent_sample2", "255 elif distribution == \"laplace\": # loss in [0,255] space but normalized by", "Weight of the TC loss term. `gamma` in the paper. is_mutual_info : bool", "by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" def __init__(self, device, data_size, gamma=40., is_mutual_info=True,", "in which to store important variables for vizualisation. \"\"\" def _pre_call(self, is_train, storer):", "kl divergence. 
References: [1] Higgins, Irina, et al. \"beta-vae: Learning basic visual concepts", "1: storer = storer else: storer = None return storer class BetaHLoss(BaseLoss): \"\"\"", "self.optimizer_d = optim.Adam(self.discriminator.parameters(), **optim_kwargs) def __call__(self, data, model, optimizer, storer): storer = self._pre_call(model.training,", "in [0,255] space but normalized by 255 to not be too big but", "else: # minibatch stratified sampling logqz, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample, self.dataset_size) # rec", "height, width = recon_data.size() is_colored = n_chan == 3 if distribution == \"bernoulli\":", "steps where gradually adding the regularisation. \"\"\" def __init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps", "reparameterisation trick shape : (batch_size, latent_dim). References ---------- [1] <NAME>, and <NAME>. \"Disentangling", "entropy (bse), Gaussian corresponds to MSE, Laplace corresponds to L1. steps_anneal: nool, optional", "information discriminator : disvae.discriminator.Discriminator optimizer_d : torch.optim kwargs: Additional arguments for `BaseLoss`, e.g.", "# vae_loss = rec_loss + kl_loss + beta * tc_loss if storer is", "by 255 to not be too big loss = F.mse_loss(recon_data * 255, data", "latent_sample1, self.data_size) else: # minibatch stratified sampling _, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size)", "def __init__(self, device, data_size, alpha=1., beta=6., gamma=1., is_mss=False, **kwargs): super().__init__(**kwargs) # beta values:", "else 1) loss = rec_loss + anneal_rec * (self.beta * kl_loss) if storer", "binary cross entropy (bse), Gaussian corresponds to MSE, Laplace corresponds to L1. steps_anneal:", "= (F.logsigmoid(d_z) - F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if", "in the paper. 
is_mutual_info : bool True : includes the mutual information term", "<NAME>, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" perm =", "[1] Kim, Hyunjik, and <NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\"", "to torch.tensor (could probably avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate", "---------- [1] Chen, <NAME>, et al. \"Isolating sources of disentanglement in variational autoencoders.\"", "return vae_loss # Run VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator Loss #", "optional Weight of the kl divergence. References: [1] Higgins, Irina, et al. \"beta-vae:", "optional Number of training iterations for interpolating C. gamma : float, optional Weight", "to MSE, and is sometimes used, but hard to train ecause it ends", "of the normal distribution. Shape (batch_size, latent_dim) storer : dict Dictionary in which", "latent_kl.sum() if storer is not None: storer['kl_loss'].append(total_kl.item()) for i in range(latent_dim): storer['kl_loss_' +", "self.is_mss: # minibatch weighted sampling _, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else: #", "probably avoid this) latent_dist = torch.stack((latent_dist[0], latent_dist[1]), dim=2) # calculate log p(z) prior_params", "data, model, optimizer, storer): storer = self._pre_call(model.training, storer) # factor-vae split data into", "once viz is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist, storer) d_z = self.discriminator(latent_sample1)", "__init__(self, beta=4, **kwargs): super().__init__(**kwargs) self.beta = beta def __call__(self, data, recon_data, latent_dist, is_train,", "+ F.logsigmoid(1 - d_z_perm))).mean() # Run discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() 
self.optimizer_d.step() if storer", "of q(z_j)) with minibatch stratified sampling. Parameters ---------- latent_dist : torch.Tensor Mean and", "sampling def __call__(self, data, recon_batch, latent_dist, is_train, storer, latent_sample=None): storer = self._pre_call(is_train, storer)", "int, optional Every how many steps to recorsd the loss. rec_dist: {\"bernoulli\", \"gaussian\",", "str(i)].append(latent_kl[i].item()) return total_kl def _permute_dims(latent_sample): \"\"\" Implementation of Algorithm 1 in ref [1].", "variables for vizualisation. \"\"\" def _pre_call(self, is_train, storer): if is_train: self.n_train_steps += 1", "\"\"\" perm = torch.zeros_like(latent_sample) batch_size, dim_z = perm.size() for z in range(dim_z): pi", "not be too big loss = F.mse_loss(recon_data * 255, data * 255, reduction=\"sum\")", "in `_kl_normal_loss` # beta = self.beta + 1 # kl_loss = _dimwise_kl_loss(*latent_dist, storer)", "- (0.5 * (F.logsigmoid(d_z) + F.logsigmoid(1 - d_z_perm))).mean() # Run discriminator optimizer self.optimizer_d.zero_grad()", "kwargs_parse={}): \"\"\"Return the correct loss function given the argparse arguments.\"\"\" kwargs_all = dict(rec_dist=kwargs_parse[\"rec_dist\"],", "255, data * 255, reduction=\"sum\") / 255 elif distribution == \"laplace\": # loss", "- math.log(batch_size * data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2), dim=1, keepdim=False) \\ - math.log(batch_size *", "return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name == \"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"],", "information, total correlation and dim-wise kl rec_loss = _reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist) mi_loss", "_reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None): \"\"\" Calculates the per image reconstruction 
loss for a", "Reconstructed data. Shape : (batch_size, n_chan, height, width). distribution : {\"bernoulli\", \"gaussian\", \"laplace\"}", "beta : float, optional Weight of the TC loss term. `gamma` in the", "_logqz, dim=1, keepdim=False).sum(1) return logqz, logqz_prodmarginals def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None): \"\"\" Calculates", "storer['dw_kl_loss'].append(dw_kl_loss.item()) # TODO Remove this when visualisation fixed tc_loss_vec = (logqz - logqz_prodmarginals)", "which to store important variables for vizualisation. Returns ------- loss : torch.Tensor Per", "Weight of the mutual information term. beta : float Weight of the total", "arXiv preprint arXiv:1802.05983 (2018). \"\"\" def __init__(self, device, data_size, gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2,", "None: storer['loss'].append(loss.item()) return loss class BetaBLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in", "_, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma = self.gamma + 1 dw_kl_loss =", "storer is not None: storer['discrim_loss'].append(d_tc_loss.item()) return vae_loss class BatchTCLoss(BaseLoss): \"\"\" Compute the decomposed", "Compute the Beta-VAE loss as in [1] Parameters ---------- beta : float, optional", "= (logqz_prodmarginals - logpz).mean() vae_loss = rec_loss + anneal_rec * (gamma * tc_loss", "torch.Tensor Input data (e.g. batch of images). 
Shape : (batch_size, n_chan, height, width).", "else: raise ValueError(\"Uknown loss : {}\".format(name)) class BaseLoss(abc.ABC): \"\"\" Base class for losses.", "= torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1) + _logqz,", "__call__(self, data, model, optimizer, storer): storer = self._pre_call(model.training, storer) # factor-vae split data", "---------- latent_sample: torch.Tensor sample from the latent dimension using the reparameterisation trick shape", "Neural Information Processing Systems. 2018. \"\"\" def __init__(self, device, data_size, alpha=1., beta=6., gamma=1.,", "Processing Systems. 2018. \"\"\" def __init__(self, device, data_size, alpha=1., beta=6., gamma=1., is_mss=False, **kwargs):", "reduction=\"sum\") else: raise ValueError(\"Unkown distribution: {}\".format(distribution)) loss = loss / batch_size if storer", "= torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1) if not", "recon_data, distribution=\"bernoulli\", storer=None): \"\"\" Calculates the per image reconstruction loss for a batch", "distribution == \"bernoulli\": loss = F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif distribution == \"gaussian\": #", "of the mutual information term. beta : float Weight of the total correlation", ": int Number of data in the training set References : [1] Chen,", "dict Dictionary in which to store important variables for vizualisation. 
\"\"\" def _pre_call(self,", "-1).sum(1) if not self.is_mss: # minibatch weighted sampling _, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1,", "storer['loss'].append(loss.item()) return loss class BetaBLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1]", "def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None): \"\"\" Calculates the per image reconstruction loss for", "name == \"betaH\": return BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name == \"VAE\": return BetaHLoss(beta=1, **kwargs_all)", ": (batch_size, n_chan, height, width). latent_dist : tuple of torch.tensor sufficient statistics of", "recon_batch, storer=storer, distribution=self.rec_dist) # TODO: remove this kl_loss term once viz is sorted", "latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if not self.is_mss: #", "which might not be optimal. Gaussian distribution corresponds to MSE, and is sometimes", "the mutual information term. beta : float Weight of the total correlation term.", "dimension using the reparameterisation trick shape : (batch_size, latent_dim). References ---------- [1] <NAME>,", "corresponds to a binary cross entropy (bse) loss and is the most commonly", "== 0: return fin assert fin > init delta = fin - init", "data, recon_data, latent_dist, is_train, storer): \"\"\" Calculates loss for a batch of data.", "the each pixel. Implicitely defines the reconstruction loss. Bernoulli corresponds to a binary", "(0.1,0.2) and (0.4,0.5), which might not be optimal. 
Gaussian distribution corresponds to MSE,", "is not None: storer['kl_loss'].append(total_kl.item()) for i in range(latent_dim): storer['kl_loss_' + str(i)].append(latent_kl[i].item()) return total_kl", "return_matrix=True) logqz_prodmarginals = (torch.logsumexp(_logqz, dim=1, keepdim=False) - math.log(batch_size * data_size)).sum(dim=1) logqz = torch.logsumexp(_logqz.sum(2),", "sampling. Parameters ---------- latent_dist : torch.Tensor Mean and logvar of the normal distribution.", "issue that it doesn't penalize the same way (0.1,0.2) and (0.4,0.5), which might", "kl rec_loss = _reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist) mi_loss = (logqz_condx - logqz).mean() tc_loss", "\"\"\" def __init__(self, device, data_size, gamma=40., is_mutual_info=True, is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5,", "storer['loss'].append(loss.item()) return loss class FactorKLoss(BaseLoss): \"\"\" Compute the Factor-VAE loss as per Algorithm", "for vizualisation. \"\"\" latent_dim = mean.size(1) # batch mean of kl for each", "dim=2) # calculate log q(z|x) and _log q(z) matrix logqz_condx = log_density_normal(latent_sample, latent_dist,", "self.n_train_steps, self.steps_anneal) if model.training else 1) # TODO replace this code with the", "(kl_loss + self.gamma * tc_loss) else: # return vae loss without mutual information", "p(z) prior_params = torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1)", "= log_density_normal(latent_sample1, prior_params, half_batch_size, return_matrix=False).view(half_batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling", "batches. In the paper they sample 2 batches batch_size = data.size(dim=0) half_batch_size =", "important variables for vizualisation. 
Returns ------- loss : torch.Tensor Per image cross entropy", "F.mse_loss(recon_data * 255, data * 255, reduction=\"sum\") / 255 elif distribution == \"laplace\":", "kwargs_parse[\"no_mss\"], **kwargs_all) else: raise ValueError(\"Uknown loss : {}\".format(name)) class BaseLoss(abc.ABC): \"\"\" Base class", "rec_dist`. References ---------- [1] Burgess, <NAME>., et al. \"Understanding disentangling in $\\beta$-VAE.\" arXiv", "term. kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Burgess, <NAME>.,", "is the same as not doing anything for L1 loss = F.l1_loss(recon_data, data,", "def __init__(self, C_init=0., C_fin=5., C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs) self.gamma = gamma self.C_init =", "super().__init__(**kwargs) self.gamma = gamma self.data_size = data_size self.device = device self.is_mutual_info = is_mutual_info", "vae_loss = rec_loss + anneal_rec * (kl_loss + self.gamma * tc_loss) else: #", "store important variables for vizualisation. \"\"\" latent_dim = mean.size(1) # batch mean of", "loss with either minibatch weighted sampling or minibatch stratified sampling according to [1]", "and dim-wise kl rec_loss = _reconstruction_loss(data, recon_batch, storer=storer, distribution=self.rec_dist) mi_loss = (logqz_condx -", "= torch.zeros_like(latent_sample) batch_size, dim_z = perm.size() for z in range(dim_z): pi = torch.randperm(batch_size).to(latent_sample.device)", "Parameters ---------- record_loss_every: int, optional Every how many steps to recorsd the loss.", "alpha : float Weight of the mutual information term. beta : float Weight", "D is dimension of distribution. 
logvar : torch.Tensor Diagonal log variance of the", "is_mss=False, disc_kwargs=dict(neg_slope=0.2, latent_dim=10, hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs) self.gamma = gamma self.data_size", "abc import math import torch from torch.nn import functional as F from torch", "_minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma = self.gamma + 1 dw_kl_loss = (logqz_prodmarginals - logpz).mean()", "loss, mutual information, total correlation and dim-wise kl rec_loss = _reconstruction_loss(data, recon_batch, storer=storer,", "of the TC loss term. `gamma` in the paper. is_mutual_info : bool True", "anneal_rec * (kl_loss + self.gamma * tc_loss) else: # return vae loss without", "dim=1, keepdim=False).sum(1) return logqz, logqz_prodmarginals def _reconstruction_loss(data, recon_data, distribution=\"bernoulli\", storer=None): \"\"\" Calculates the", "to store important variables for vizualisation. Returns ------- loss : torch.Tensor Per image", "BetaHLoss(beta=kwargs_parse[\"betaH_B\"], **kwargs_all) elif name == \"VAE\": return BetaHLoss(beta=1, **kwargs_all) elif name == \"betaB\":", "logvar.exp()).mean(dim=0) total_kl = latent_kl.sum() if storer is not None: storer['kl_loss'].append(total_kl.item()) for i in", "mean : torch.Tensor Mean of the normal distribution. Shape (batch_size, latent_dim) where D", "n_chan, height, width). recon_data : torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height,", "Discriminator Loss # Get second sample of latent distribution latent_sample2 = model.sample_latent(data2) z_perm", "is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif name == \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"],", "dimension-wise KL term. 
latent_dim: int Dimension of the latent variable is_mss : bool", "L1 solves partially the issue of MSE. storer : dict Dictionary in which", "**kwargs): super().__init__(**kwargs) # beta values: dsprites: 6, celeba: 15 self.device = device self.dataset_size", "steps_anneal=0): self.n_train_steps = 0 self.record_loss_every = record_loss_every self.rec_dist = rec_dist self.steps_anneal = steps_anneal", "VAE Loss recon_batch, latent_dist, latent_sample1 = model(data1) rec_loss = _reconstruction_loss(data1, recon_batch, storer=storer, distribution=self.rec_dist)", "logvar + mean.pow(2) + logvar.exp()).mean(dim=0) total_kl = latent_kl.sum() if storer is not None:", "# return vae loss without mutual information term # change latent dist to", "replace this code with the following commented out code after viz is fixed", "arguments for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Burgess, <NAME>., et al. \"Understanding", "sample of latent distribution latent_sample2 = model.sample_latent(data2) z_perm = _permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm)", "hidden_units=1000), optim_kwargs=dict(lr=5e-4, betas=(0.5, 0.9)), **kwargs): super().__init__(**kwargs) self.gamma = gamma self.data_size = data_size self.device", "data * 255, reduction=\"sum\") / 255 elif distribution == \"laplace\": # loss in", "self.C_n_interp) if is_train else self.C_fin) loss = rec_loss + self.gamma * (kl_loss -", "= self.beta # kl_loss = _kl_normal_loss(*latent_dist, storer) # else: # # beta has", "torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz = log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1) if not self.is_mss:", "prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1) if not self.is_mss: # minibatch weighted sampling logqz, logqz_prodmarginals", "TODO replace this code with the following commented out code after viz is", "tc_loss) else: # return vae loss 
without mutual information term # change latent", "kl_loss + beta * tc_loss if storer is not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if", "= is_mss # minibatch stratified sampling def __call__(self, data, recon_batch, latent_dist, is_train, storer,", "Bernoulli corresponds to a binary cross entropy (bse), Gaussian corresponds to MSE, Laplace", "= device self.dataset_size = data_size self.beta = beta self.alpha = alpha self.gamma =", "log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix + _logqz.sum(2),", "* (-1 - logvar + mean.pow(2) + logvar.exp()).mean(dim=0) total_kl = latent_kl.sum() if storer", "for vizualisation. \"\"\" def _pre_call(self, is_train, storer): if is_train: self.n_train_steps += 1 if", "total_kl = latent_kl.sum() if storer is not None: storer['kl_loss'].append(total_kl.item()) for i in range(latent_dim):", "loss class FactorKLoss(BaseLoss): \"\"\" Compute the Factor-VAE loss as per Algorithm 2 of", "\"\"\" import abc import math import torch from torch.nn import functional as F", "_permute_dims(latent_sample2).detach() d_z_perm = self.discriminator(z_perm) # Calculate total correlation loss d_tc_loss = - (0.5", "steps_anneal @abc.abstractmethod def __call__(self, data, recon_data, latent_dist, is_train, storer): \"\"\" Calculates loss for", "is_train else self.C_fin) loss = rec_loss + self.gamma * (kl_loss - C).abs() batch_size", "height, width). latent_dist : tuple of torch.tensor sufficient statistics of the latent dimension.", "for `BaseLoss`, e.g. rec_dist`. References ---------- [1] Burgess, <NAME>., et al. \"Understanding disentangling", "float Weight of the mutual information term. beta : float Weight of the", "factorising.\" arXiv preprint arXiv:1802.05983 (2018). 
\"\"\" perm = torch.zeros_like(latent_sample) batch_size, dim_z = perm.size()", "rec_loss + anneal_rec * (self.alpha * mi_loss + self.beta * tc_loss + self.gamma", "= log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=True) logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix +", ": float, optional Weight of the kl divergence. References: [1] Higgins, Irina, et", "loss loss = rec_loss + anneal_rec * (self.alpha * mi_loss + self.beta *", "rec_loss = _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) C = (linear_annealing(self.C_init,", "__init__(self, C_init=0., C_fin=5., C_n_interp=25000, gamma=30., **kwargs): super().__init__(**kwargs) self.gamma = gamma self.C_init = C_init", "float Weight of the dimension-wise KL term. latent_dim: int Dimension of the latent", "be too big loss = F.mse_loss(recon_data * 255, data * 255, reduction=\"sum\") /", "- logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals - logpz).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal)", "name == \"betaB\": return BetaBLoss(C_init=kwargs_parse[\"betaB_initC\"], C_fin=kwargs_parse[\"betaB_finC\"], C_n_interp=kwargs_parse[\"betaB_stepsC\"], gamma=kwargs_parse[\"betaB_G\"], **kwargs_all) elif name == \"factor\":", "is sometimes used, but hard to train ecause it ends up focusing only", "[1] Parameters ---------- C_init : float, optional Starting annealed capacity C. C_fin :", "Run discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step() if storer is not None: storer['discrim_loss'].append(d_tc_loss.item()) return", "fin assert fin > init delta = fin - init annealed = min(init", "Dictionary in which to store important variables for vizualisation. 
\"\"\" def _pre_call(self, is_train,", "(latent_dist) across the batch for each of the latent dimensions (mean and log_var).", "the TC term is included in `_kl_normal_loss` # beta = self.beta + 1", "way (0.1,0.2) and (0.4,0.5), which might not be optimal. Gaussian distribution corresponds to", "paper. is_mutual_info : bool True : includes the mutual information term in the", "Algorithm 1 in ref [1]. Randomly permutes the sample from q(z) (latent_dist) across", "pixel and channel) \"\"\" batch_size, n_chan, height, width = recon_data.size() is_colored = n_chan", "log (product of marginals of q(z_j)) with minibatch weighted sampling. Parameters ---------- latent_dist", "= _reconstruction_loss(data, recon_data, storer=storer, distribution=self.rec_dist) kl_loss = _kl_normal_loss(*latent_dist, storer) anneal_rec = (linear_annealing(0, 1,", "/ 255 elif distribution == \"laplace\": # loss in [0,255] space but normalized", "logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample, self.dataset_size) else: # minibatch stratified sampling logqz, logqz_prodmarginals =", "<NAME>. \"Disentangling by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" def __init__(self, device, data_size,", "data_size : int Number of data in the training set References : [1]", "dim=1, keepdim=False) \\ - math.log(batch_size * data_size) return logqz, logqz_prodmarginals def _minibatch_stratified_sampling(latent_dist, latent_sample,", "the latent dimension. E.g. for gaussian (mean, log_var) each of shape : (batch_size,", "sometimes used, but hard to train ecause it ends up focusing only a", "framework.\" (2016). kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`. \"\"\" def __init__(self, beta=4,", "if distribution == \"bernoulli\": loss = F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif distribution == \"gaussian\":", "`_kl_normal_loss` # beta = self.beta + 1 # kl_loss = _dimwise_kl_loss(*latent_dist, storer) #", "of data. 
Parameters ---------- data : torch.Tensor Input data (e.g. batch of images).", "= self._pre_call(model.training, storer) # factor-vae split data into two batches. In the paper", "for interpolating C. gamma : float, optional Weight of the KL divergence term.", "for each of the latent dimensions (mean and log_var). Parameters ---------- latent_sample: torch.Tensor", "vizualisation. \"\"\" def _pre_call(self, is_train, storer): if is_train: self.n_train_steps += 1 if not", "dimension using the reparameterisation trick shape : (batch_size, latent_dim). data_size : int Number", "rec_loss + kl_loss + beta * tc_loss if storer is not None: storer['loss'].append(vae_loss.item())", "Distribution of the likelihood on the each pixel. Implicitely defines the loss Bernoulli", "References: [1] Higgins, Irina, et al. \"beta-vae: Learning basic visual concepts with a", "step, annealing_steps): \"\"\"Linear annealing of a parameter.\"\"\" if annealing_steps == 0: return fin", "\"factor\": return FactorKLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], gamma=kwargs_parse[\"factor_G\"], is_mutual_info=not kwargs_parse[\"no_mutual_info\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) elif name ==", "if annealing_steps == 0: return fin assert fin > init delta = fin", "data, recon_batch, latent_dist, is_train, storer, latent_sample=None): storer = self._pre_call(is_train, storer) batch_size = data.size(0)", "fin, step, annealing_steps): \"\"\"Linear annealing of a parameter.\"\"\" if annealing_steps == 0: return", "def _permute_dims(latent_sample): \"\"\" Implementation of Algorithm 1 in ref [1]. Randomly permutes the", "by factorising.\" arXiv preprint arXiv:1802.05983 (2018). \"\"\" perm = torch.zeros_like(latent_sample) batch_size, dim_z =", "// 2 data = data.split(half_batch_size) data1 = data[0] data2 = data[1] # Factor", "References ---------- [1] Burgess, <NAME>., et al. 
\"Understanding disentangling in $\\beta$-VAE.\" arXiv preprint", "recon_data : torch.Tensor Reconstructed data. Shape : (batch_size, n_chan, height, width). latent_dist :", "loss = rec_loss + self.gamma * (kl_loss - C).abs() batch_size = data.size(0) if", "logiw_matrix = log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals =", "likelihood on the each pixel. Implicitely defines the loss Bernoulli corresponds to a", "_, logqz_prodmarginals = _minibatch_weighted_sampling(latent_dist, latent_sample1, self.data_size) else: # minibatch stratified sampling _, logqz_prodmarginals", "the mutual information term in the loss False : removes mutual information discriminator", "don't backprop if evaluating return vae_loss # Run VAE optimizer optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step()", "name == \"batchTC\": return BatchTCLoss(kwargs_parse[\"device\"], kwargs_parse[\"data_size\"], alpha=kwargs_parse[\"batchTC_A\"], beta=kwargs_parse[\"batchTC_B\"], gamma=kwargs_parse[\"batchTC_G\"], is_mss=not kwargs_parse[\"no_mss\"], **kwargs_all) else:", "**kwargs): super().__init__(**kwargs) self.beta = beta def __call__(self, data, recon_data, latent_dist, is_train, storer): storer", "batch_size, return_matrix=False).sum(dim=1) # calculate log p(z) prior_params = torch.zeros(batch_size, latent_dist.size(1), 2).to(self.device) logpz =", "self.beta + 1 # kl_loss = _dimwise_kl_loss(*latent_dist, storer) # # vae_loss = rec_loss", "latent dimension using the reparameterisation trick shape : (batch_size, latent_dim). References ---------- [1]", "class BetaHLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1] Parameters ---------- beta", "information term. beta : float Weight of the total correlation term. 
gamma :", "self.beta = beta self.alpha = alpha self.gamma = gamma self.is_mss = is_mss #", "# Run discriminator optimizer self.optimizer_d.zero_grad() d_tc_loss.backward() self.optimizer_d.step() if storer is not None: storer['discrim_loss'].append(d_tc_loss.item())", "torch.Tensor Mean of the normal distribution. Shape (batch_size, latent_dim) where D is dimension", "with a constrained variational framework.\" (2016). kwargs: Additional arguments for `BaseLoss`, e.g. rec_dist`.", "loss Bernoulli corresponds to a binary cross entropy (bse) loss and is the", "(batch_size, n_chan, height, width). recon_data : torch.Tensor Reconstructed data. Shape : (batch_size, n_chan,", "self.C_init = C_init self.C_fin = C_fin self.C_n_interp = C_n_interp def __call__(self, data, recon_data,", "C. C_fin : float, optional Final annealed capacity C. C_n_interp : float, optional", "and log_var). Parameters ---------- latent_sample: torch.Tensor sample from the latent dimension using the", "beta=4, **kwargs): super().__init__(**kwargs) self.beta = beta def __call__(self, data, recon_data, latent_dist, is_train, storer):", "# kl_loss = _kl_normal_loss(*latent_dist, storer) # else: # # beta has to be", "of a parameter.\"\"\" if annealing_steps == 0: return fin assert fin > init", "storer['tc_loss'].append(tc_loss.item()) if not model.training: # don't backprop if evaluating return vae_loss # Run", "data1 = data[0] data2 = data[1] # Factor VAE Loss recon_batch, latent_dist, latent_sample1", "data_size and device def get_loss_f(name, kwargs_parse={}): \"\"\"Return the correct loss function given the", "loss without mutual information term # change latent dist to torch.tensor (could probably", "capacity C. C_fin : float, optional Final annealed capacity C. C_n_interp : float,", "is_mss # minibatch stratified sampling def __call__(self, data, recon_batch, latent_dist, is_train, storer, latent_sample=None):", "term. `gamma` in the paper. 
is_mutual_info : bool True : includes the mutual", "Diagonal log variance of the normal distribution. Shape (batch_size, latent_dim) storer : dict", "def _minibatch_weighted_sampling(latent_dist, latent_sample, data_size): \"\"\" Estimates log q(z) and the log (product of", "Implicitely defines the reconstruction loss. Bernoulli corresponds to a binary cross entropy (bse),", "is not None: storer['recon_loss'].append(loss.item()) return loss def _kl_normal_loss(mean, logvar, storer=None): \"\"\" Calculates the", "rec_dist: {\"bernoulli\", \"gaussian\", \"laplace\"}, optional Reconstruction distribution istribution of the likelihood on the", "from disvae.utils.math import log_density_normal, log_importance_weight_matrix # TO-DO: clean data_size and device def get_loss_f(name,", "+ self.gamma * (kl_loss - C).abs() batch_size = data.size(0) if storer is not", "rec_loss + self.gamma * (kl_loss - C).abs() batch_size = data.size(0) if storer is", "from the latent dimension using the reparameterisation trick shape : (batch_size, latent_dim). References", "self.C_n_interp = C_n_interp def __call__(self, data, recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train,", "this kl_loss term once viz is sorted # https://github.com/YannDubs/disentangling-vae/pull/25#issuecomment-473535863 kl_loss = _kl_normal_loss(*latent_dist, storer)", "reduction=\"sum\") / 255 elif distribution == \"laplace\": # loss in [0,255] space but", "None return storer class BetaHLoss(BaseLoss): \"\"\" Compute the Beta-VAE loss as in [1]", "sample from the latent dimension using the reparameterisation trick shape : (batch_size, latent_dim).", "calculate log q(z|x) and _log q(z) matrix logqz_condx = log_density_normal(latent_sample, latent_dist, batch_size, return_matrix=False).sum(dim=1)", "is not None: storer['loss'].append(vae_loss.item()) storer['tc_loss'].append(tc_loss.item()) if not model.training: # don't backprop if evaluating", "data. 
Shape : (batch_size, n_chan, height, width). latent_dist : tuple of torch.tensor sufficient", "(logqz - logqz_prodmarginals).mean() dw_kl_loss = (logqz_prodmarginals - logpz).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps,", "Reconstruction distribution istribution of the likelihood on the each pixel. Implicitely defines the", "not doing anything for L1 loss = F.l1_loss(recon_data, data, reduction=\"sum\") else: raise ValueError(\"Unkown", "storer is not None: storer['loss'].append(loss.item()) return loss class BetaBLoss(BaseLoss): \"\"\" Compute the Beta-VAE", "reduction=\"sum\") elif distribution == \"gaussian\": # loss in [0,255] space but normalized by", "correlation loss d_tc_loss = - (0.5 * (F.logsigmoid(d_z) + F.logsigmoid(1 - d_z_perm))).mean() #", "- logpz).mean() anneal_rec = (linear_annealing(0, 1, self.n_train_steps, self.steps_anneal) if is_train else 1) #", "beta def __call__(self, data, recon_data, latent_dist, is_train, storer): storer = self._pre_call(is_train, storer) rec_loss", "pixels that are very wrong. Laplace distribution corresponds to L1 solves partially the", "= rec_dist self.steps_anneal = steps_anneal @abc.abstractmethod def __call__(self, data, recon_data, latent_dist, is_train, storer):", "2).to(self.device) logpz = log_density_normal(latent_sample, prior_params, batch_size, return_matrix=False).view(batch_size, -1).sum(1) if not self.is_mss: # minibatch", "cannot be negative : TEST tc_loss = (F.logsigmoid(d_z) - F.logsigmoid(1 - d_z)).clamp(0).mean() anneal_rec", "d_z_perm = self.discriminator(z_perm) # Calculate total correlation loss d_tc_loss = - (0.5 *", "width). 
distribution : {\"bernoulli\", \"gaussian\", \"laplace\"} Distribution of the likelihood on the each", "gamma = self.gamma + 1 dw_kl_loss = (logqz_prodmarginals - logpz).mean() vae_loss = rec_loss", "optimizer.zero_grad() vae_loss.backward(retain_graph=True) optimizer.step() # Discriminator Loss # Get second sample of latent distribution", "n_chan == 3 if distribution == \"bernoulli\": loss = F.binary_cross_entropy(recon_data, data, reduction=\"sum\") elif", "the paper. is_mutual_info : bool True : includes the mutual information term in", "which to store important variables for vizualisation. \"\"\" latent_dim = mean.size(1) # batch", "0 because TC cannot be negative : TEST tc_loss = (F.logsigmoid(d_z) - F.logsigmoid(1", "__init__(self, record_loss_every=50, rec_dist=\"bernoulli\", steps_anneal=0): self.n_train_steps = 0 self.record_loss_every = record_loss_every self.rec_dist = rec_dist", "record_loss_every self.rec_dist = rec_dist self.steps_anneal = steps_anneal @abc.abstractmethod def __call__(self, data, recon_data, latent_dist,", "Compute the Beta-VAE loss as in [1] Parameters ---------- C_init : float, optional", "minibatch weighted sampling or minibatch stratified sampling according to [1] Parameters ---------- data_size:", ": dict Dictionary in which to store important variables for vizualisation. 
\"\"\" def", "else: # minibatch stratified sampling _, logqz_prodmarginals = _minibatch_stratified_sampling(latent_dist, latent_sample1, self.data_size) gamma =", "log_importance_weight_matrix(batch_size, data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size,", "if storer is not None: storer['loss'].append(loss.item()) return loss class BetaBLoss(BaseLoss): \"\"\" Compute the", "data_size).to(latent_dist.device) logqz = torch.logsumexp(logiw_matrix + _logqz.sum(2), dim=1, keepdim=False) logqz_prodmarginals = torch.logsumexp(logiw_matrix.view(batch_size, batch_size, 1)" ]
[ "django.test import TestCase from django.urls import reverse from model_mommy import mommy from monitor.models", "path = ( 'monitor.api.serializers.twitter_user_serializers.' 'retrieve_tweets.delay' ) with mock.patch(path, mock.Mock()) as retrieve_tweets: response =", "self.assertEqual(TwitterUser.objects.count(), 3) path = ( 'monitor.api.serializers.twitter_user_serializers.' 'retrieve_tweets.delay' ) with mock.patch(path, mock.Mock()) as retrieve_tweets:", "def test_get(self): url = reverse('monitor:usernames') response = self.client.get(url) self.assertEqual(len(response.data), 3) for count, user", "= reverse('monitor:usernames') response = self.client.get(url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual(", "import mock class TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self): super(TestTwitterUserView, self).setUp() self.url = reverse('monitor:users') self.users", "{'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self): super(TestUsernameListView,", "= mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): url = reverse('monitor:usernames') response = self.client.get(url) self.assertEqual(len(response.data), 3)", "_quantity=3) def test_get(self): url = reverse('monitor:usernames') response = self.client.get(url) self.assertEqual(len(response.data), 3) for count,", "from monitor.models import TwitterUser from monitor.tests.utils.http_client_mixin import HTTPClientMixin import mock class TestTwitterUserView(HTTPClientMixin, TestCase):", "'retrieve_tweets.delay' ) with mock.patch(path, mock.Mock()) as retrieve_tweets: response = self.client.post(self.url, {'username': 'test'}) retrieve_tweets.assert_called()", "with mock.patch(path, mock.Mock()) as retrieve_tweets: response = self.client.post(self.url, 
{'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4)", "TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self): super(TestTwitterUserView, self).setUp() self.url = reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser', _quantity=3)", "mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): url = reverse('monitor:usernames') response = self.client.get(url) self.assertEqual(len(response.data), 3) for", "TestCase from django.urls import reverse from model_mommy import mommy from monitor.models import TwitterUser", "as retrieve_tweets: response = self.client.post(self.url, {'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'), 'test') class", "3) for count, user in enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path", "self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): url = reverse('monitor:usernames') response = self.client.get(url) self.assertEqual(len(response.data),", ") with mock.patch(path, mock.Mock()) as retrieve_tweets: response = self.client.post(self.url, {'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(),", "setUp(self): super(TestTwitterUserView, self).setUp() self.url = reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): response", "retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self): super(TestUsernameListView, self).setUp() self.users", "= self.client.post(self.url, {'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) 
self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin, TestCase): def", "_quantity=3) def test_get(self): response = self.client.get(self.url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users):", "def setUp(self): super(TestUsernameListView, self).setUp() self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): url = reverse('monitor:usernames')", "setUp(self): super(TestUsernameListView, self).setUp() self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): url = reverse('monitor:usernames') response", "monitor.tests.utils.http_client_mixin import HTTPClientMixin import mock class TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self): super(TestTwitterUserView, self).setUp() self.url", "retrieve_tweets: response = self.client.post(self.url, {'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin,", "'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self): super(TestUsernameListView, self).setUp()", "mommy from monitor.models import TwitterUser from monitor.tests.utils.http_client_mixin import HTTPClientMixin import mock class TestTwitterUserView(HTTPClientMixin,", "self).setUp() self.url = reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): response = self.client.get(self.url)", "TestCase): def setUp(self): super(TestTwitterUserView, self).setUp() self.url = reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser', _quantity=3) def", "self.url = reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): response = 
self.client.get(self.url) self.assertEqual(len(response.data),", "'monitor.api.serializers.twitter_user_serializers.' 'retrieve_tweets.delay' ) with mock.patch(path, mock.Mock()) as retrieve_tweets: response = self.client.post(self.url, {'username': 'test'})", "class TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self): super(TestTwitterUserView, self).setUp() self.url = reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser',", "3) path = ( 'monitor.api.serializers.twitter_user_serializers.' 'retrieve_tweets.delay' ) with mock.patch(path, mock.Mock()) as retrieve_tweets: response", "( 'monitor.api.serializers.twitter_user_serializers.' 'retrieve_tweets.delay' ) with mock.patch(path, mock.Mock()) as retrieve_tweets: response = self.client.post(self.url, {'username':", "response = self.client.get(self.url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def", "import reverse from model_mommy import mommy from monitor.models import TwitterUser from monitor.tests.utils.http_client_mixin import", "super(TestUsernameListView, self).setUp() self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): url = reverse('monitor:usernames') response =", "'test') class TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self): super(TestUsernameListView, self).setUp() self.users = mommy.make('monitor.TwitterUser', _quantity=3) def", "def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path = ( 'monitor.api.serializers.twitter_user_serializers.' 
'retrieve_tweets.delay' ) with mock.patch(path, mock.Mock())", "4) self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self): super(TestUsernameListView, self).setUp() self.users = mommy.make('monitor.TwitterUser',", "reverse('monitor:usernames') response = self.client.get(url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual( response.data[count].get('username'),", "user in enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path = ( 'monitor.api.serializers.twitter_user_serializers.'", "response = self.client.get(url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual( response.data[count].get('username'), user.username", "= self.client.get(self.url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def test_post(self):", "import TwitterUser from monitor.tests.utils.http_client_mixin import HTTPClientMixin import mock class TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self):", "reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): response = self.client.get(self.url) self.assertEqual(len(response.data), 3) for", "mock.Mock()) as retrieve_tweets: response = self.client.post(self.url, {'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'), 'test')", "reverse from model_mommy import mommy from monitor.models import TwitterUser from monitor.tests.utils.http_client_mixin import HTTPClientMixin", "TwitterUser from monitor.tests.utils.http_client_mixin import HTTPClientMixin import mock class TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self): 
super(TestTwitterUserView,", "model_mommy import mommy from monitor.models import TwitterUser from monitor.tests.utils.http_client_mixin import HTTPClientMixin import mock", "mock class TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self): super(TestTwitterUserView, self).setUp() self.url = reverse('monitor:users') self.users =", "from django.urls import reverse from model_mommy import mommy from monitor.models import TwitterUser from", "monitor.models import TwitterUser from monitor.tests.utils.http_client_mixin import HTTPClientMixin import mock class TestTwitterUserView(HTTPClientMixin, TestCase): def", "self).setUp() self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): url = reverse('monitor:usernames') response = self.client.get(url)", "TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self): super(TestUsernameListView, self).setUp() self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): url", "test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path = ( 'monitor.api.serializers.twitter_user_serializers.' 
'retrieve_tweets.delay' ) with mock.patch(path, mock.Mock()) as", "self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3)", "response = self.client.post(self.url, {'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin, TestCase):", "self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self): super(TestUsernameListView, self).setUp() self.users =", "mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): response = self.client.get(self.url) self.assertEqual(len(response.data), 3) for count, user in", "user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path = ( 'monitor.api.serializers.twitter_user_serializers.' 
'retrieve_tweets.delay' ) with mock.patch(path,", "self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self): super(TestUsernameListView, self).setUp() self.users = mommy.make('monitor.TwitterUser', _quantity=3)", "from monitor.tests.utils.http_client_mixin import HTTPClientMixin import mock class TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self): super(TestTwitterUserView, self).setUp()", "super(TestTwitterUserView, self).setUp() self.url = reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): response =", "from django.test import TestCase from django.urls import reverse from model_mommy import mommy from", "= mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): response = self.client.get(self.url) self.assertEqual(len(response.data), 3) for count, user", "enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path = ( 'monitor.api.serializers.twitter_user_serializers.' 
'retrieve_tweets.delay' )", "from model_mommy import mommy from monitor.models import TwitterUser from monitor.tests.utils.http_client_mixin import HTTPClientMixin import", "HTTPClientMixin import mock class TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self): super(TestTwitterUserView, self).setUp() self.url = reverse('monitor:users')", "def test_get(self): response = self.client.get(self.url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual(response.data[count].get('id'),", "self.client.get(self.url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(),", "test_get(self): url = reverse('monitor:usernames') response = self.client.get(url) self.assertEqual(len(response.data), 3) for count, user in", "self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): response = self.client.get(self.url) self.assertEqual(len(response.data), 3) for count,", "= ( 'monitor.api.serializers.twitter_user_serializers.' 'retrieve_tweets.delay' ) with mock.patch(path, mock.Mock()) as retrieve_tweets: response = self.client.post(self.url,", "def setUp(self): super(TestTwitterUserView, self).setUp() self.url = reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self):", "self.assertEqual(response.data[count].get('id'), user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path = ( 'monitor.api.serializers.twitter_user_serializers.' 
'retrieve_tweets.delay' ) with", "import HTTPClientMixin import mock class TestTwitterUserView(HTTPClientMixin, TestCase): def setUp(self): super(TestTwitterUserView, self).setUp() self.url =", "in enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path = ( 'monitor.api.serializers.twitter_user_serializers.' 'retrieve_tweets.delay'", "= reverse('monitor:users') self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): response = self.client.get(self.url) self.assertEqual(len(response.data), 3)", "url = reverse('monitor:usernames') response = self.client.get(url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users):", "TestCase): def setUp(self): super(TestUsernameListView, self).setUp() self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self): url =", "test_get(self): response = self.client.get(self.url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id)", "import mommy from monitor.models import TwitterUser from monitor.tests.utils.http_client_mixin import HTTPClientMixin import mock class", "class TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self): super(TestUsernameListView, self).setUp() self.users = mommy.make('monitor.TwitterUser', _quantity=3) def test_get(self):", "self.client.post(self.url, {'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'), 'test') class TestUsernameListView(HTTPClientMixin, TestCase): def setUp(self):", "mock.patch(path, mock.Mock()) as retrieve_tweets: response = self.client.post(self.url, {'username': 'test'}) retrieve_tweets.assert_called() self.assertEqual(TwitterUser.objects.count(), 4) self.assertEqual(response.data.get('username'),", "for count, user in 
enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path =", "import TestCase from django.urls import reverse from model_mommy import mommy from monitor.models import", "= self.client.get(url) self.assertEqual(len(response.data), 3) for count, user in enumerate(self.users): self.assertEqual( response.data[count].get('username'), user.username )", "django.urls import reverse from model_mommy import mommy from monitor.models import TwitterUser from monitor.tests.utils.http_client_mixin", "count, user in enumerate(self.users): self.assertEqual(response.data[count].get('id'), user.id) def test_post(self): self.assertEqual(TwitterUser.objects.count(), 3) path = (" ]
[ "miss pressure.\") # .format(sid, year)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP", "__name__ == \"__main__\": # testing code # import sys # print(sys.argv) tool =", "mon_rec = self.calcYear(sid, year, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\",", "def statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath", "fo.writelines(result) fo.close() def calcYear(self, sid, year, recs): if len(recs) > 0: # statistics", "temporary change valid_pressure = hours24.query((\"1200> PRES > 600 \\ & HR in [2,", "statistics pressure # valid_pressure = hours24.query(\"1200 > PRES > 600\") # temporary change", "= valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_mon = 999999 valid_prec", "avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt) else:", "# items[7] == \"0\" # rec = strfmt.format(items[0], int(items[1]), int(items[2]), # int(items[3]), int(items[4]),", "int(filename[:4]) # mon = int(filename[4:6]) # day = int(filename[6:8]) # recs = []", "24: # ok for 24 hours avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp", "for item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item,", "== 4: avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: avg_pres", "24 records on\").format( sid, dt.year, dt.month, dt.day)) else: # statistics pressure # valid_pressure", "recs.query(\"1200 > AVG_PRES > 800\") # print(valid_pressure) if len(valid_pressure) >= 24: avg_pres =", "self.calcMonthly(sid, year, mon, recs) 
result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\", \"AVG_PRES\",", "= whf + timedelta(hours=24) cond = \"{0} < DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs", "cond = \"YEAR == {0} & MON == {1}\".format(year, mon) recs = db.query(cond)", "precipation valid_prec = recs.query(\"500 > PREC24 >= 0\") prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt =", "stat_win): print(\"processing {0}\".format(srcPath)) db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result = [] #", "if prec12_pm_cnt == 0: prec12_pm = 999999 if stat_win == \"0808\" or stat_win", "loop y_series = db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() + 1 for", "\"The Surf4Hours Tool convert surf files organized \\ by day into files organized", "cond = \"{0} < DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond) return recs", "\"sl0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir =", "> PREC24 >= 0\") prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec) if prec_cnt ==", "valid_temperature = hours24.query(\"60> TEMP > -60 \\ & HR in [2, 8, 14,", "= date(y_series.min(), 1, 1) while(curDay < endDay): if stat_win == \"0808\": recs =", "\"monthly2020\") yearDir = os.path.join(targetRoot, subdir, \"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir)", "prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_mon = 999999", ".format(sid, year, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_year, prec_cnt, prec24_year, prec24_cnt) return", "srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, 
delim_whitespace=True, index_col=\"DATE\") result = [] # todo:", "curDay, recs, stat_win) if day_rec is not None: result.append(day_rec) curDay = curDay +", "IDs and statisics for daily and monthly.\"\"\" def __init__(self): ToolBase.__init__(self, \"Surf4HoursTool\", \"The Surf4Hours", "year, mon,)) # statistics precipation valid_prec = recs.query(\"500 > PREC24 >= 0\") prec_mon", "0, 0, 0) \\ - timedelta(hours=df_hours) wht = whf + timedelta(hours=24) cond =", "in group.items(): # target = os.path.join(targetRoot, k) # recs_w = [ # strfmt.format(k,", "== 24: # ok for 24 hours avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max()", "wht = whf + timedelta(hours=24) cond = \"{0} < DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\"))", "len(valid_temperature) >= 10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else:", "valid_temperature = hours24.query(\"60 > TEMP > -60\") valid_temperature = hours24.query(\"60> TEMP > -60", "# with open(srcPath) as f: # recs = f.readlines() # recs = recs[1:]", "skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\") result = [] # todo: do config the range of", "print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 20-20, qixiang subdir", ".format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\",", "statistics temperature # valid_temperature = hours24.query(\"60 > TEMP > -60\") valid_temperature = hours24.query(\"60>", "bystationDir = os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot, bystationDir) # 08-08, qixiang subdir = \"qx0808\"", "valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") # print(valid_temperature) if 
len(valid_temperature) >= 24:", "with open(os.path.join(parentDir, item), 'r+') as fo: # recs = fo.readlines() # sample =", "# year = int(filename[:4]) # mon = int(filename[4:6]) # day = int(filename[6:8]) #", ".format(sid, dt.year, # dt.month, dt.day)) # statistics temperature # valid_temperature = hours24.query(\"60 >", "= valid_pressure[\"PRES\"].min() else: valid_pressure = hours24.query((\"1200> PRES > 600 \\ & HR in", "= 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station", "= recs.query(\"5000 > PREC24_MON >= 0\") prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec) if", "self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath", "pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result = [] # todo: do config the range", "timedelta(days=1) if stat_win == \"0808\" or stat_win == \"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\")", "= valid_pressure[\"PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres = 999999 #", "targetPath = os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath, targetPath) def stasticsYearSingleStatation(self, sid, srcPath, targetPath): db", "avg_temp = 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}, Station {0}", "index = 0 # last_rec = len(recs) - 1 # while index <", "0: # statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") if len(valid_pressure)", "dt.day, 0, 0, 0) \\ - timedelta(hours=df_hours) wht = whf + timedelta(hours=24) cond", "# sid = sample[0] # year = int(sample[2]) # mon = int(sample[3]) #", ">= 24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else: 
avg_temp", "endDay = date(y_series.max()+1, 1, 1) curDay = date(y_series.min(), 1, 1) while(curDay < endDay):", "> 600 \\ & HR in [2, 8, 14, 20]\")) if len(valid_pressure) ==", "convert surf files orgarnized by month into files by station IDs and statisics", "# 999999, 999999, 999999) # for i in range(24)] # for line in", "== \"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\",", "nextday = today # fo.seek(0) # fo.write(header) # index = 0 # last_rec", "self.queryData(db, curDay, -8) # if not recs.empty: day_rec = self.calcDaily(sid, curDay, recs, stat_win)", "== {0} & MON == {1}\".format(year, mon) recs = db.query(cond) if not recs.empty:", "today: # fo.writelines(recs[index: index+24]) # index = index + 24 # if index", "day) # nextday = today # fo.seek(0) # fo.write(header) # index = 0", "mon, day) # nextday = today # fo.seek(0) # fo.write(header) # index =", "& MON == {1}\".format(year, mon) recs = db.query(cond) if not recs.empty: mon_rec =", "avg_pres = 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" #", "'a') as fo: # fo.writelines(recs_w) # fo.close() # def insertHeader(self, parentDir): # header", "targetRoot) bystationDir = os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot, bystationDir) # 08-08, qixiang subdir =", "parser.add_argument(\"source\", action=\"store\", # help=\"root dir for source files\") parser.add_argument(\"target\", action=\"store\", help=\"root dir for", "= \"qx2020\" dailyDir = os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly2020\") yearDir", "help=\"root dir for source files\") parser.add_argument(\"target\", action=\"store\", help=\"root dir for all data\") def", "= 999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" 
\"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year, avg_pres, max_pres, min_pres,", "day, i, # 999999, 999999, 999999) # for i in range(24)] # for", "# day = int(sample[4]) # today = date(year, mon, day) # nextday =", "print(\"An exception occurred\", line, items) # with open(target, 'a') as fo: # fo.writelines(recs_w)", "# header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\", \"YEAR\", # \"MON\", \"DAY\",", "delim_whitespace=True) result = [] # todo: do config the range of loop y_series", "1, 1) curDay = date(y_series.min(), 1, 1) while(curDay < endDay): if stat_win ==", "into files by station IDs and statisics for daily and monthly.\"\"\" def __init__(self):", "& HR in [2, 8, 14, 20]\") if len(valid_temperature) == 4: avg_temp =", "[2, 8, 14, 20]\") # print(valid_temperature) if len(valid_temperature) == 24: # ok for", "for year in range(year_begin, year_end): for mon in range(1, 13): cond = \"YEAR", "year = int(filename[:4]) # mon = int(filename[4:6]) # day = int(filename[6:8]) # recs", "daily and monthly.\") self._version = \"surf4hourstool.py 0.0.1\" def defineArgumentParser(self, parser): # parser.add_argument(\"source\", action=\"store\",", "valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1},", "Tool convert surf files organized \\ by day into files organized by station.", "and monthly.\") self._version = \"surf4hourstool.py 0.0.1\" def defineArgumentParser(self, parser): # parser.add_argument(\"source\", action=\"store\", #", "is not None: result.append(day_rec) curDay = curDay + timedelta(days=1) if stat_win == \"0808\"", "\\ & HR in [2, 8, 14, 20]\")) if len(valid_pressure) == 4: avg_pres", "in filelist: # with open(os.path.join(parentDir, item), 'r+') as fo: # recs = fo.readlines()", "in [2, 8, 14, 20]\")) if len(valid_pressure) == 4: avg_pres = valid_pressure[\"PRES\"].mean() max_pres", "if not os.path.exists(srcPath): # 
self._loggej.info(\"Failed: {0} does't existe\".format(srcPath)) # filename = os.path.basename(srcPath) #", "year = today.year # mon = today.month # day = today.day # recs_empty", "prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt) return rec def statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist", "1 # year_begin = 2015 # year_end = 2016 for year in range(year_begin,", "recs = self.queryData(db, curDay, -8) # if not recs.empty: day_rec = self.calcDaily(sid, curDay,", "temperature\") # .format(sid, year, mon,)) # statistics precipation valid_prec = recs.query(\"500 > PREC24", "= valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp = 999999 #", "valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d},", "# with open(target, 'a') as fo: # fo.writelines(recs_w) # fo.close() # def insertHeader(self,", "or stat_win == \"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\",", "f.close() # group = {} # strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\")", "not recs.empty: day_rec = self.calcDaily(sid, curDay, recs, stat_win) if day_rec is not None:", "year = int(sample[2]) # mon = int(sample[3]) # day = int(sample[4]) # nextday", "\"PREC20_08\", \"C2\") else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\",", "dt.year, # dt.month, dt.day)) # statistics temperature # valid_temperature = hours24.query(\"60 > TEMP", "# recs_empty = [ # strfmt.format( # sid, year, mon, day, i, #", "monthlyDir = os.path.join(targetRoot, subdir, \"monthly2020\") yearDir = os.path.join(targetRoot, 
subdir, \"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir,", "= os.path.join(targetRoot, k) # recs_w = [ # strfmt.format(k, year, mon, day, i,", "len(valid_prec) if prec_cnt == 0: prec_year = 999999 valid_prec = recs.query(\"5000 > PREC24_MON", "08-08, shuili subdir = \"sl0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot,", "int(sample[4]) # today = date(year, mon, day) # nextday = today # fo.seek(0)", "sid, year, mon, recs): if len(recs) > 0: # statistics pressure valid_pressure =", "prec12_am_cnt, prec12_pm, prec12_pm_cnt) return rec def statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot)", "= 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station", "return rec def clearDirectory(self, targetRoot): if os.path.exists(targetRoot) and len(os.listdir(targetRoot)) > 0: print(\"\\nThe dir", "args): # srcRoot = args.source targetRoot = args.target # print(srcRoot, \"-->\", targetRoot) bystationDir", "max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss", "# nextday = date(year, mon, day) # else: # strfmt = ( #", "len(valid_pressure) >= 10: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else:", "# # MORE INFO ... 
# email: import os import shutil import time", "\"MON\", \"DAY\", \"HR\", # \"PRES\", \"TEMP\", \"PREC\") # filelist = sorted(os.listdir(parentDir)) # print(filelist)", "\" # \"Station {0} miss pressure at\" # \"[02, 08, 14, 20]\") #", "= (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year, mon, avg_pres, max_pres, min_pres, avg_temp, max_temp,", "def insertHeader(self, parentDir): # header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\", \"YEAR\",", "= pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result = [] # todo: do config the", "# mon = int(sample[3]) # day = int(sample[4]) # today = date(year, mon,", "for item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item,", "= (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\",", "self.statisticsYears(monthlyDir, yearDir) # def batchConvert(self, srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot) # filelist = sorted(os.listdir(srcPathRoot))", "\"0808\" or stat_win == \"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"),", "\"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 08-08, shuili", "parser = argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", 
version=\"%(prog)s 0.0.1\")", "range(year_begin, year_end): cond = \"YEAR == {0}\".format(year) recs = db.query(cond) if not recs.empty:", "to convert surf files orgarnized by month into files by station IDs and", "srcPath, targetPath) def stasticsYearSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result", "[] # todo: do config the range of loop y_series = db[\"YEAR\"] year_begin", "max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss", "range(24)] # for line in v: # items = line.split() # # try:", "targetRoot): # if not os.path.exists(srcPath): # self._loggej.info(\"Failed: {0} does't existe\".format(srcPath)) # filename =", "sys # print(sys.argv) tool = Surf4HoursTool() import argparse from ..base.logger import Logger parser", "# .format(sid, year)) # statistics precipation valid_prec = recs.query(\"5000 > PREC_MON >= 0\")", "index > last_rec: # break # sample = recs[index].split() # sid = sample[0]", "os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath, targetPath) def stasticsYearSingleStatation(self, sid, srcPath,", "# last_rec = len(recs) - 1 # while index < last_rec: # if", "year, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_year, prec_cnt, prec24_year, prec24_cnt) return rec", "self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # def batchConvert(self, srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot) # filelist", "= len(valid_prec) if prec24_year == 0: prec24_year = 999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\"", "year, mon, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\",", "max_temp, 
min_temp, prec_year, prec_cnt, prec24_year, prec24_cnt) return rec def clearDirectory(self, targetRoot): if os.path.exists(targetRoot)", "os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) #", "stat_win == \"2020\": recs = self.queryData(db, curDay, 4) else: recs = self.queryData(db, curDay,", "os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win) def stasticsDailySingleStatation(self, sid, srcPath, targetPath, stat_win): print(\"processing", "= valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres = 999999 #", "rec def statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist:", "= [] # todo: do config the range of loop y_series = db[\"YEAR\"]", "# float(items[7])) # group[items[0]].append(rec) # for k, v in group.items(): # target =", "# recs = f.readlines() # recs = recs[1:] # f.close() # group =", "index < last_rec: # if nextday == today: # fo.writelines(recs[index: index+24]) # index", "-60\") valid_temperature = hours24.query(\"60> TEMP > -60 \\ & HR in [2, 8,", "fo.seek(0) # fo.write(header) # index = 0 # last_rec = len(recs) - 1", "999999 min_pres = 999999 # self._logger.error((\"{1}, Station {0} miss pressure.\") # .format(sid, year))", "into files organized by station. 
and \\ statisics for daily and monthly.\") self._version", "int(sample[4]) # nextday = date(year, mon, day) # else: # strfmt = (", "= db.query(cond) if not recs.empty: mon_rec = self.calcMonthly(sid, year, mon, recs) result.append(mon_rec) header", "\"Station {0} miss pressure at\" # \"[02, 08, 14, 20]\") # .format(sid, dt.year,", "rec.split(\",\") # if items[0] not in group: # group[items[0]] = [] # if", "08, 14, 20]\") # .format(sid, dt.year, # dt.month, dt.day)) # statistics precipation valid_prec", "999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}, Station {0} miss temperature\")", "= 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss pressure", "[ # strfmt.format( # sid, year, mon, day, i, # 999999, 999999, 999999)", "today = date(year, mon, day) # nextday = today # fo.seek(0) # fo.write(header)", "hours24.query(\"60 > TEMP > -60\") valid_temperature = hours24.query(\"60> TEMP > -60 \\ &", "prec24 = 999999 am_prec = valid_prec.query(\"HR <=8 | HR>20\") pm_prec = valid_prec.query(\"8 <", "year_begin = 2015 # year_end = 2016 for year in range(year_begin, year_end): cond", "MON == {1}\".format(year, mon) recs = db.query(cond) if not recs.empty: mon_rec = self.calcMonthly(sid,", "else: avg_temp = 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}, Station", "# # print(\"An exception occurred\", line, items) # with open(target, 'a') as fo:", "is not empty and will been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1) if not", "0\") prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_mon =", "dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 08-08, shuili subdir = \"sl0808\" dailyDir", "# ok for 24 hours avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres =", "\"0808\") self.statisticsMonthly(dailyDir, monthlyDir) 
self.statisticsYears(monthlyDir, yearDir) # 20-20, qixiang subdir = \"qx2020\" dailyDir =", "prec_cnt == 0: prec_mon = 999999 valid_prec = recs.query(\"500 > PREC24 >= 0", "[] # todo: do config the range of loop y_series = db[\"YEAR\"] endDay", "not recs.empty: mon_rec = self.calcYear(sid, year, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\",", "for i in range(24)] # for line in v: # items = line.split()", "dt, df_hours=4): whf = datetime(dt.year, dt.month, dt.day, 0, 0, 0) \\ - timedelta(hours=df_hours)", "float(items[6]), # float(items[7])) # group[items[0]].append(rec) # for k, v in group.items(): # target", "= os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot, bystationDir) # 08-08, qixiang subdir = \"qx0808\" dailyDir", "= valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres", "item) targetPath = os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath, targetPath) def stasticsMonthSingleStatation(self, sid, srcPath, targetPath):", "sid, dt, hours24, stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24) > 24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\", "as fo: fo.write(header) fo.writelines(result) fo.close() def calcMonthly(self, sid, year, mon, recs): if len(recs)", "tool is designed to convert surf files orgarnized by month into files by", "result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\",", "db.query(cond) if not recs.empty: mon_rec = self.calcYear(sid, year, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\"", "dt.year, dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, 
prec24, prec24_cnt, prec12_pm, prec12_pm_cnt,", "= self.calcYear(sid, year, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\",", "year = int(sample[2]) # mon = int(sample[3]) # day = int(sample[4]) # today", "# else: # strfmt = ( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") #", "miss temperature\") # .format(sid, year)) # statistics precipation valid_prec = recs.query(\"5000 > PREC_MON", "prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec) if prec24_year == 0: prec24_year = 999999", "sid, srcPath, targetPath, stat_win): print(\"processing {0}\".format(srcPath)) db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result", "source files\") parser.add_argument(\"target\", action=\"store\", help=\"root dir for all data\") def run(self, args): #", "print(srcRoot, \"-->\", targetRoot) bystationDir = os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot, bystationDir) # 08-08, qixiang", "targetPath, stat_win): print(\"processing {0}\".format(srcPath)) db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result = []", "curDay = curDay + timedelta(days=1) if stat_win == \"0808\" or stat_win == \"0832\":", "db[\"YEAR\"] endDay = date(y_series.max()+1, 1, 1) curDay = date(y_series.min(), 1, 1) while(curDay <", "\"DATETIME\", \"YEAR\", # \"MON\", \"DAY\", \"HR\", # \"PRES\", \"TEMP\", \"PREC\") # filelist =", "( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year = today.year # mon", "subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\") 
self.statisticsMonthly(dailyDir,", "recs = [] # with open(srcPath) as f: # recs = f.readlines() #", "AVG_PRES > 800\") if len(valid_pressure) >= 10: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max()", "os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot, bystationDir) # 08-08, qixiang subdir = \"qx0808\" dailyDir =", "qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 20-20, qixiang subdir =", "# ok for 24 hours avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp =", "= 999999 am_prec = valid_prec.query(\"HR <=8 | HR>20\") pm_prec = valid_prec.query(\"8 < HR", "999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss temperature\") # .format(sid, year, mon,))", "avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999", "import date from datetime import timedelta, datetime import pandas as pd from ..base.toolbase", "if day_rec is not None: result.append(day_rec) curDay = curDay + timedelta(days=1) if stat_win", "else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\",", "miss temperature at\" # \"[02, 08, 14, 20]\") # .format(sid, dt.year, # dt.month,", "pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") if len(valid_pressure) >= 10: avg_pres", "y_series.min() year_end = y_series.max() + 1 # year_begin = 2015 # year_end =", "= 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station", "\"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 08-08, shuili subdir = \"sl0808\" dailyDir =", "# 
sample = recs[index].split() # sid = sample[0] # year = int(sample[2]) #", "= (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\",", "date(year, mon, day) # else: # strfmt = ( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\"", "999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss pressure at\"", "as fo: fo.write(header) fo.writelines(result) fo.close() def calcYear(self, sid, year, recs): if len(recs) >", "0: print(\"\\nThe dir of {0} is not empty and will been overrided.\" .format(targetRoot))", "from datetime import date from datetime import timedelta, datetime import pandas as pd", "tool.defineArgumentParser(parser) args = parser.parse_args() print(args) logger = Logger(\"./log/d2s.log\") tool.attachLogger(logger) targetRoot = args.target tool.run(args)", "float(items[5]), float(items[6]), # float(items[7])) # group[items[0]].append(rec) # for k, v in group.items(): #", "sorted(os.listdir(srcPathRoot)) # for item in filelist: # srcPath = os.path.join(srcPathRoot, item) # print(srcPath)", "def stasticsDailySingleStatation(self, sid, srcPath, targetPath, stat_win): print(\"processing {0}\".format(srcPath)) db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True,", ">= 0\") prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_mon", "recs.empty: mon_rec = self.calcMonthly(sid, year, mon, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\",", "# .format(sid, year)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\")", "\\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", 
\"PREC24\",", "if (len(hours24) > 24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0} has more than 24 records", "# .format(sid, year, mon)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP >", "999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss pressure.\") # .format(sid, year, mon))", "# print(valid_temperature) if len(valid_temperature) == 24: # ok for 24 hours avg_temp =", "{1}\".format(year, mon) recs = db.query(cond) if not recs.empty: mon_rec = self.calcMonthly(sid, year, mon,", "day = today.day # recs_empty = [ # strfmt.format( # sid, year, mon,", "v in group.items(): # target = os.path.join(targetRoot, k) # recs_w = [ #", "(\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec in recs: # items =", "> 0: # statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") #", "0 & CNT == 24\") prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt", "# for i in range(24)] # for line in v: # items =", "has more than 24 records on\").format( sid, dt.year, dt.month, dt.day)) else: # statistics", "i, # 999999, 999999, 999999) # for i in range(24)] # fo.writelines(recs_emt #", "999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0}", "pd from ..base.toolbase import ToolBase class Surf4HoursTool(ToolBase): \"\"\"The tool is designed to convert", "mon)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") # print(valid_temperature)", "{0} miss temperature at\" # \"[02, 08, 14, 20]\") # .format(sid, dt.year, #", "filelist = sorted(os.listdir(parentDir)) # print(filelist) # for item in filelist: # with open(os.path.join(parentDir,", "= date(y_series.max()+1, 1, 1) curDay = date(y_series.min(), 1, 1) while(curDay < endDay): if", "# recs_w = [ # strfmt.format(k, year, mon, day, i, # 999999, 999999,", "mon, day) # else: # strfmt = ( # 
\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" #", "prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args = parser.parse_args() print(args) logger = Logger(\"./log/d2s.log\")", "os.path.exists(srcPath): # self._loggej.info(\"Failed: {0} does't existe\".format(srcPath)) # filename = os.path.basename(srcPath) # year =", "pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") # print(valid_pressure) if len(valid_pressure) >=", "len(am_prec) if prec12_am_cnt == 0: prec12_am = 999999 prec12_pm = pm_prec[\"PREC\"].sum() prec12_pm_cnt =", "> 0: print(\"\\nThe dir of {0} is not empty and will been overrided.\"", "= os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath, targetPath) def stasticsMonthSingleStatation(self, sid,", "ok for 24 hours avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min()", "print(filelist) # for item in filelist: # with open(os.path.join(parentDir, item), 'r+') as fo:", "strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec in recs: #", "== 0: prec24_year = 999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year,", "valid_prec = recs.query(\"500 > PREC24 >= 0 & CNT == 24\") prec24_mon =", "os.path.join(targetRoot, subdir, \"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) #", "print(\"\\nThe dir of {0} is not empty and will been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot,", "= (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" 
\"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres, max_pres, min_pres,", "= y_series.min() year_end = y_series.max() + 1 for year in range(year_begin, year_end): for", "prec_mon, prec_cnt, prec24_mon, prec24_cnt) return rec def statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist =", "\\ .format(sid, year, mon, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_mon, prec_cnt, prec24_mon,", "valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999 max_pres =", "\"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24) > 24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0} has more", "(\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_year,", "line # # except: # # print(\"An exception occurred\", line, items) # with", "def statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist:", "with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def queryData(self, db, dt, df_hours=4):", "mon = int(filename[4:6]) # day = int(filename[6:8]) # recs = [] # with", "os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath, targetPath) def stasticsYearSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath,", "monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir,", "# fo.seek(0) # fo.write(header) # index = 0 # last_rec = len(recs) -", 
"shuili subdir = \"sl0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir,", "valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") if len(valid_pressure) >= 10: avg_pres =", ".format(sid, year)) # statistics precipation valid_prec = recs.query(\"5000 > PREC_MON >= 0\") prec_year", "for all data\") def run(self, args): # srcRoot = args.source targetRoot = args.target", "\"[02, 08, 14, 20]\") # .format(sid, dt.year, # dt.month, dt.day)) # statistics temperature", "in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath, targetPath,", "for k, v in group.items(): # target = os.path.join(targetRoot, k) # recs_w =", "# statistics precipation valid_prec = recs.query(\"5000 > PREC_MON >= 0\") prec_year = valid_prec[\"PREC_MON\"].sum()", "..base.logger import Logger parser = argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\",", "999999, 999999) # for i in range(24)] # for line in v: #", "\"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\", \"YEAR\", # \"MON\", \"DAY\", \"HR\", # \"PRES\", \"TEMP\", \"PREC\")", "recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\",", "item) self.stasticsYearSingleStatation(item, srcPath, targetPath) def stasticsYearSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True,", "Station {0} has more than 24 records on\").format( sid, dt.year, dt.month, dt.day)) else:", "# mon = today.month # day = today.day # recs_empty = [ #", "dt, hours24, stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24) > 24): 
self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station", "= db.query(cond) return recs def calcDaily(self, sid, dt, hours24, stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\"", "statisics for daily and monthly.\") self._version = \"surf4hourstool.py 0.0.1\" def defineArgumentParser(self, parser): #", "max_temp, min_temp, prec_mon, prec_cnt, prec24_mon, prec24_cnt) return rec def statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot)", "# dt.month, dt.day)) # statistics precipation valid_prec = hours24.query(\"200 > PREC >= 0\")", "year_end = 2016 for year in range(year_begin, year_end): cond = \"YEAR == {0}\".format(year)", "\"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\") with open(targetPath, 'w') as fo:", "dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 20-20, qixiang subdir = \"qx2020\" dailyDir", "# \"Station {0} miss temperature\") # .format(sid, year, mon,)) # statistics precipation valid_prec", "else: # strfmt = ( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year", "\"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\", \"C1\",", "not None: result.append(day_rec) curDay = curDay + timedelta(days=1) if stat_win == \"0808\" or", "\"qx2020\" dailyDir = os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly2020\") yearDir =", "# \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year = today.year # mon =", "or stat_win == \"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, 
dt.strftime(\"%Y%m%d\"), dt.year,", "organized by station. and \\ statisics for daily and monthly.\") self._version = \"surf4hourstool.py", "igsnrr # # MORE INFO ... # email: import os import shutil import", "# for item in filelist: # srcPath = os.path.join(srcPathRoot, item) # print(srcPath) #", "in [2, 8, 14, 20]\")) # print(valid_pressure) if len(valid_pressure) == 24: # ok", "item) targetPath = os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath, targetPath) def stasticsYearSingleStatation(self, sid, srcPath, targetPath):", "\"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcYear(self, sid, year,", "0.0.1\") tool.defineArgumentParser(parser) args = parser.parse_args() print(args) logger = Logger(\"./log/d2s.log\") tool.attachLogger(logger) targetRoot = args.target", "# 999999, 999999, 999999) # for i in range(24)] # fo.writelines(recs_emt # today", "statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath", "-60\") if len(valid_temperature) >= 10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp =", "i in range(24)] # for line in v: # items = line.split() #", "as fo: fo.write(header) fo.writelines(result) fo.close() def queryData(self, db, dt, df_hours=4): whf = datetime(dt.year,", "avg_temp = 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" #", "year, recs): if len(recs) > 0: # statistics pressure valid_pressure = recs.query(\"1200 >", "# recs_w[int(items[5])] = line # # except: # # print(\"An exception occurred\", line,", "-*- # COPYRIGHT 2016 igsnrr # # MORE INFO ... 
# email: import", "sample = recs[0].split() # sid = sample[0] # year = int(sample[2]) # mon", "999999) # for i in range(24)] # fo.writelines(recs_emt # today = today +", "= index + 24 # if index > last_rec: # break # sample", "20]\")) # print(valid_pressure) if len(valid_pressure) == 24: # ok for 24 hours avg_pres", "int(sample[3]) # day = int(sample[4]) # nextday = date(year, mon, day) # else:", "from ..base.toolbase import ToolBase class Surf4HoursTool(ToolBase): \"\"\"The tool is designed to convert surf", "filelist = sorted(os.listdir(srcPathRoot)) # for item in filelist: # srcPath = os.path.join(srcPathRoot, item)", "\"YEAR\", # \"MON\", \"DAY\", \"HR\", # \"PRES\", \"TEMP\", \"PREC\") # filelist = sorted(os.listdir(parentDir))", "0\") prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_year =", "max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999 max_pres = 999999", "= recs.query(\"60 > AVG_TEMP > -60\") if len(valid_temperature) >= 10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean()", "int(items[4]), # float(items[5]), float(items[6]), # float(items[7])) # group[items[0]].append(rec) # for k, v in", "# sample = recs[0].split() # sid = sample[0] # year = int(sample[2]) #", "\"CNT\", \"PREC24_MON\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcMonthly(self,", "800\") # print(valid_pressure) if len(valid_pressure) >= 24: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max()", ">= 0\") prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_year", "24 hours avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: valid_pressure", "24\") prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24_mon =", 
"year_end): for mon in range(1, 13): cond = \"YEAR == {0} & MON", "# print(filelist) # for item in filelist: # with open(os.path.join(parentDir, item), 'r+') as", "= today + timedelta(days=1) # fo.flush() # fo.close() def statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win):", "[2, 8, 14, 20]\")) # print(valid_pressure) if len(valid_pressure) == 24: # ok for", "valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: avg_temp = 999999 max_temp =", "day_rec = self.calcDaily(sid, curDay, recs, stat_win) if day_rec is not None: result.append(day_rec) curDay", "orgarnized by month into files by station IDs and statisics for daily and", "0: prec12_am = 999999 prec12_pm = pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec) if prec12_pm_cnt ==", "prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_year = 999999", "== {0}\".format(year) recs = db.query(cond) if not recs.empty: mon_rec = self.calcYear(sid, year, recs)", "AVG_PRES > 800\") # print(valid_pressure) if len(valid_pressure) >= 24: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres", "fo.writelines(result) fo.close() def queryData(self, db, dt, df_hours=4): whf = datetime(dt.year, dt.month, dt.day, 0,", "year, mon)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") #", "prec12_am_cnt) else: rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day,", "utf-8 -*- # COPYRIGHT 2016 igsnrr # # MORE INFO ... 
# email:", "def calcYear(self, sid, year, recs): if len(recs) > 0: # statistics pressure valid_pressure", "int(items[1]), int(items[2]), # int(items[3]), int(items[4]), # float(items[5]), float(items[6]), # float(items[7])) # group[items[0]].append(rec) #", "max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt) else: rec", "os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly2020\") yearDir = os.path.join(targetRoot, subdir, \"year2020\")", "> -60\") valid_temperature = hours24.query(\"60> TEMP > -60 \\ & HR in [2,", "# fo.writelines(recs_w) # fo.close() # def insertHeader(self, parentDir): # header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" #", "# strfmt.format( # sid, year, mon, day, i, # 999999, 999999, 999999) #", "= int(sample[3]) # day = int(sample[4]) # nextday = date(year, mon, day) #", "# strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec in recs:", "valid_prec = recs.query(\"500 > PREC24 >= 0\") prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec)", "of loop y_series = db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() + 1", "def clearDirectory(self, targetRoot): if os.path.exists(targetRoot) and len(os.listdir(targetRoot)) > 0: print(\"\\nThe dir of {0}", "the range of loop y_series = db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max()", "fo: fo.write(header) fo.writelines(result) fo.close() def calcYear(self, sid, year, recs): if len(recs) > 0:", "\"Station {0} miss temperature at\" # \"[02, 08, 14, 20]\") # .format(sid, dt.year,", "dir for source files\") parser.add_argument(\"target\", action=\"store\", help=\"root dir for all data\") def run(self,", "\"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\", \"CNT\", \"PREC24_Y\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header)", 
"skip_blank_lines=True, delim_whitespace=True) result = [] # todo: do config the range of loop", "as f: # recs = f.readlines() # recs = recs[1:] # f.close() #", "\"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp,", "last_rec: # break # sample = recs[index].split() # sid = sample[0] # year", "item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath,", "prec12_am = am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec) if prec12_am_cnt == 0: prec12_am = 999999", "item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath,", "argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args", "in group: # group[items[0]] = [] # if items[7] == \"999990\": # items[7]", ">= 0\") prec24 = valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24", "# for i in range(24)] # fo.writelines(recs_emt # today = today + timedelta(days=1)", "max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: valid_temperature = hours24.query(\"60> TEMP > -60", "srcRoot = args.source targetRoot = args.target # print(srcRoot, \"-->\", targetRoot) bystationDir = os.path.join(targetRoot,", "sorted(os.listdir(parentDir)) # print(filelist) # for item in filelist: # with open(os.path.join(parentDir, item), 'r+')", "open(srcPath) as f: # recs = f.readlines() # recs = recs[1:] # f.close()", "code # import sys # print(sys.argv) tool = Surf4HoursTool() import argparse from ..base.logger", "\"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", 
\"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\") with open(targetPath, 'w') as", "max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt) return rec", "fo.writelines(result) fo.close() def calcMonthly(self, sid, year, mon, recs): if len(recs) > 0: #", "for line in v: # items = line.split() # # try: # recs_w[int(items[5])]", "def run(self, args): # srcRoot = args.source targetRoot = args.target # print(srcRoot, \"-->\",", "rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year, avg_pres, max_pres, min_pres, avg_temp, max_temp,", "timedelta(hours=24) cond = \"{0} < DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond) return", "= 0 # last_rec = len(recs) - 1 # while index < last_rec:", "os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win) def stasticsDailySingleStatation(self, sid,", "y_series = db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() + 1 # year_begin", "<=8 | HR>20\") pm_prec = valid_prec.query(\"8 < HR <= 20\") prec12_am = am_prec[\"PREC\"].sum()", "= [] # if items[7] == \"999990\": # items[7] == \"0\" # rec", "& HR in [2, 8, 14, 20]\")) if len(valid_pressure) == 4: avg_pres =", "items = line.split() # # try: # recs_w[int(items[5])] = line # # except:", "\" # \"Station {0} miss temperature at\" # \"[02, 08, 14, 20]\") #", "year)) # statistics precipation valid_prec = recs.query(\"5000 > PREC_MON >= 0\") prec_year =", "subdir = \"qx2020\" dailyDir = os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly2020\")", "int(items[3]), int(items[4]), # float(items[5]), float(items[6]), # float(items[7])) # group[items[0]].append(rec) # for k, v", "len(os.listdir(targetRoot)) > 0: print(\"\\nThe dir 
of {0} is not empty and will been", "# for rec in recs: # items = rec.split(\",\") # if items[0] not", "recs.query(\"500 > PREC24 >= 0\") prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec) if prec_cnt", "existe\".format(srcPath)) # filename = os.path.basename(srcPath) # year = int(filename[:4]) # mon = int(filename[4:6])", "f: # recs = f.readlines() # recs = recs[1:] # f.close() # group", "valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: valid_temperature = hours24.query(\"60> TEMP >", "prec12_am, prec12_am_cnt) else: rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month,", "8, 14, 20]\") if len(valid_temperature) == 4: avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max()", "if stat_win == \"0808\" or stat_win == \"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\", "hours24.query(\"1200 > PRES > 600\") # temporary change valid_pressure = hours24.query((\"1200> PRES >", "# def insertHeader(self, parentDir): # header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\",", "timedelta(hours=df_hours) wht = whf + timedelta(hours=24) cond = \"{0} < DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"),", "\"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\", \"PREC24_MON\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header)", "delim_whitespace=True, index_col=\"DATETIME\") result = [] # todo: do config the range of loop", "\"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\")", "for daily and monthly.\"\"\" def __init__(self): 
ToolBase.__init__(self, \"Surf4HoursTool\", \"The Surf4Hours Tool convert surf", "the range of loop y_series = db[\"YEAR\"] endDay = date(y_series.max()+1, 1, 1) curDay", "index + 24 # if index > last_rec: # break # sample =", "> AVG_TEMP > -60\") # print(valid_temperature) if len(valid_temperature) >= 24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean()", "range(year_begin, year_end): for mon in range(1, 13): cond = \"YEAR == {0} &", "COPYRIGHT 2016 igsnrr # # MORE INFO ... # email: import os import", "# statistics temperature # valid_temperature = hours24.query(\"60 > TEMP > -60\") valid_temperature =", "datetime import timedelta, datetime import pandas as pd from ..base.toolbase import ToolBase class", "day = int(sample[4]) # today = date(year, mon, day) # nextday = today", "# temporary change valid_pressure = hours24.query((\"1200> PRES > 600 \\ & HR in", "temperature\") # .format(sid, year)) # statistics precipation valid_prec = recs.query(\"5000 > PREC_MON >=", "else: recs = self.queryData(db, curDay, -8) # if not recs.empty: day_rec = self.calcDaily(sid,", "v: # items = line.split() # # try: # recs_w[int(items[5])] = line #", "if items[0] not in group: # group[items[0]] = [] # if items[7] ==", "will been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1) if not os.path.exists(targetRoot): os.makedirs(targetRoot) if __name__", "dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot,", "len(recs) - 1 # while index < last_rec: # if nextday == today:", "+ 24 # if index > last_rec: # break # sample = recs[index].split()", "999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss temperature at\" # \"[02, 08,", "fo: # fo.writelines(recs_w) # fo.close() # def insertHeader(self, parentDir): # header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\"", "max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}, 
Station {0} miss temperature\") #", "args.source targetRoot = args.target # print(srcRoot, \"-->\", targetRoot) bystationDir = os.path.join(targetRoot, \"bystation\") #", "= int(sample[2]) # mon = int(sample[3]) # day = int(sample[4]) # nextday =", "all data\") def run(self, args): # srcRoot = args.source targetRoot = args.target #", "\"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\",", "= os.path.join(srcPathRoot, item) # print(srcPath) # self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot) # def convert(self,", "pm_prec = valid_prec.query(\"8 < HR <= 20\") prec12_am = am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec)", "max_temp, min_temp, prec24, prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt) else: rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\"", "parser.parse_args() print(args) logger = Logger(\"./log/d2s.log\") tool.attachLogger(logger) targetRoot = args.target tool.run(args) else: print(\"loading day2stationtool", "8, 14, 20]\")) if len(valid_pressure) == 4: avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max()", "prec_year, prec_cnt, prec24_year, prec24_cnt) return rec def clearDirectory(self, targetRoot): if os.path.exists(targetRoot) and len(os.listdir(targetRoot))", "def batchConvert(self, srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot) # filelist = sorted(os.listdir(srcPathRoot)) # for item", "# group[items[0]] = [] # if items[7] == \"999990\": # items[7] == \"0\"", "< last_rec: # if nextday == today: # fo.writelines(recs[index: index+24]) # index =", "= [ # strfmt.format( # sid, year, mon, day, i, # 999999, 999999,", "db, dt, df_hours=4): whf = datetime(dt.year, dt.month, dt.day, 0, 0, 0) \\ -", "records on\").format( sid, dt.year, dt.month, dt.day)) else: # statistics pressure # valid_pressure =", "0: # 
statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") # print(valid_pressure)", "self.stasticsMonthSingleStatation(item, srcPath, targetPath) def stasticsMonthSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True,", "valid_prec = hours24.query(\"200 > PREC >= 0\") prec24 = valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec)", "self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 20-20, qixiang subdir = \"qx2020\" dailyDir = os.path.join(targetRoot,", "= \"YEAR == {0} & MON == {1}\".format(year, mon) recs = db.query(cond) if", "16) elif stat_win == \"2020\": recs = self.queryData(db, curDay, 4) else: recs =", "= self.calcDaily(sid, curDay, recs, stat_win) if day_rec is not None: result.append(day_rec) curDay =", "hours24.query(\"60> TEMP > -60 \\ & HR in [2, 8, 14, 20]\") if", "today.year # mon = today.month # day = today.day # recs_empty = [", "# self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss pressure.\") # .format(sid, year, mon)) #", "\"C1\", \"PREC20_08\", \"C2\") else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\",", "= os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir)", "999999 min_temp = 999999 # self._logger.error((\"{1}, Station {0} miss temperature\") # .format(sid, year))", "Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args = parser.parse_args() print(args) logger =", "subdir, \"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir) 
self.statisticsYears(monthlyDir, yearDir) # def", "= 999999 min_pres = 999999 # self._logger.error((\"{1}, Station {0} miss pressure.\") # .format(sid,", "index+24]) # index = index + 24 # if index > last_rec: #", "\"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir)", "= am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec) if prec12_am_cnt == 0: prec12_am = 999999 prec12_pm", "srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath, targetPath) def stasticsYearSingleStatation(self,", "files organized \\ by day into files organized by station. and \\ statisics", "# parser.add_argument(\"source\", action=\"store\", # help=\"root dir for source files\") parser.add_argument(\"target\", action=\"store\", help=\"root dir", "recs.query(\"60 > AVG_TEMP > -60\") if len(valid_temperature) >= 10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp", "prec12_pm_cnt) return rec def statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item", "0: prec24 = 999999 am_prec = valid_prec.query(\"HR <=8 | HR>20\") pm_prec = valid_prec.query(\"8", "# self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss temperature\") # .format(sid, year, mon,)) #", "strfmt.format(items[0], int(items[1]), int(items[2]), # int(items[3]), int(items[4]), # float(items[5]), float(items[6]), # float(items[7])) # group[items[0]].append(rec)", ".format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1) if not os.path.exists(targetRoot): os.makedirs(targetRoot) if __name__ == \"__main__\": #", "min_temp = valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp = 999999", ">= 0\") prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec) 
if prec24_year == 0: prec24_year", "= 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss temperature", "self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss temperature at\" # \"[02, 08, 14, 20]\")", "= 999999 valid_prec = recs.query(\"500 > PREC24 >= 0 & CNT == 24\")", "curDay, -8) # if not recs.empty: day_rec = self.calcDaily(sid, curDay, recs, stat_win) if", "\"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close()", "# fo.writelines(recs_emt # today = today + timedelta(days=1) # fo.flush() # fo.close() def", "self.queryData(db, curDay, 4) else: recs = self.queryData(db, curDay, -8) # if not recs.empty:", "open(os.path.join(parentDir, item), 'r+') as fo: # recs = fo.readlines() # sample = recs[0].split()", "k) # recs_w = [ # strfmt.format(k, year, mon, day, i, # 999999,", "\\ by day into files organized by station. and \\ statisics for daily", "\"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\", \"CNT\", \"PREC24_Y\", \"CNT24\") with open(targetPath, 'w')", "# \"SID\", \"DATETIME\", \"YEAR\", # \"MON\", \"DAY\", \"HR\", # \"PRES\", \"TEMP\", \"PREC\") #", "temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") if len(valid_temperature) >= 10: avg_temp", "[] # if items[7] == \"999990\": # items[7] == \"0\" # rec =", "self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 08-08, shuili subdir = \"sl0808\"", "int(filename[6:8]) # recs = [] # with open(srcPath) as f: # recs =", "valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec) if prec24_year == 0: prec24_year = 999999 rec =", "time from datetime import date from datetime import timedelta, datetime import pandas as", "== \"0808\" or stat_win == \"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" 
\"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid,", "< DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond) return recs def calcDaily(self, sid,", "os.listdir(srcPathRoot) for item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item)", "== \"0808\" or stat_win == \"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\",", "recs = f.readlines() # recs = recs[1:] # f.close() # group = {}", "# .format(sid, dt.year, # dt.month, dt.day)) # statistics temperature # valid_temperature = hours24.query(\"60", "\"DAY\", \"HR\", # \"PRES\", \"TEMP\", \"PREC\") # filelist = sorted(os.listdir(parentDir)) # print(filelist) #", "mon_rec = self.calcMonthly(sid, year, mon, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\",", "999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year, avg_pres, max_pres, min_pres, avg_temp,", "of loop y_series = db[\"YEAR\"] endDay = date(y_series.max()+1, 1, 1) curDay = date(y_series.min(),", "if len(valid_temperature) == 4: avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min()", "dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_pm,", "srcPath = os.path.join(srcPathRoot, item) # print(srcPath) # self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot) # def", "= today # fo.seek(0) # fo.write(header) # index = 0 # last_rec =", "None: result.append(day_rec) curDay = curDay + timedelta(days=1) if stat_win == \"0808\" or stat_win", 
"avg_temp = 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" #", "statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath =", "# recs = recs[1:] # f.close() # group = {} # strfmt =", "+ timedelta(days=1) if stat_win == \"0808\" or stat_win == \"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\"", "# filelist = sorted(os.listdir(srcPathRoot)) # for item in filelist: # srcPath = os.path.join(srcPathRoot,", "# if not os.path.exists(srcPath): # self._loggej.info(\"Failed: {0} does't existe\".format(srcPath)) # filename = os.path.basename(srcPath)", "header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\",", "\"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\") with open(targetPath, 'w') as fo: fo.write(header)", "Station {0} miss temperature\") # .format(sid, year)) # statistics precipation valid_prec = recs.query(\"5000", "= \"sl0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir", "print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # def batchConvert(self, srcPathRoot,", "change valid_pressure = hours24.query((\"1200> PRES > 600 \\ & HR in [2, 8,", "by station. 
and \\ statisics for daily and monthly.\") self._version = \"surf4hourstool.py 0.0.1\"", "prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt) return rec def statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist =", "for i in range(24)] # fo.writelines(recs_emt # today = today + timedelta(days=1) #", "self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss pressure.\") # .format(sid, year, mon)) # statistics", "\"\"\"The tool is designed to convert surf files orgarnized by month into files", "and will been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1) if not os.path.exists(targetRoot): os.makedirs(targetRoot) if", "\"SID\", \"DATETIME\", \"YEAR\", # \"MON\", \"DAY\", \"HR\", # \"PRES\", \"TEMP\", \"PREC\") # filelist", "999999 # self._logger.error((\"{1}, Station {0} miss pressure.\") # .format(sid, year)) # statistics temperature", "curDay + timedelta(days=1) if stat_win == \"0808\" or stat_win == \"0832\": header =", "not empty and will been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1) if not os.path.exists(targetRoot):", "targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath = os.path.join(srcPathRoot, item)", "srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath, targetPath) def stasticsMonthSingleStatation(self,", "targetRoot = args.target # print(srcRoot, \"-->\", targetRoot) bystationDir = os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot,", "prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24 = 999999 am_prec = valid_prec.query(\"HR", "avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: valid_pressure = hours24.query((\"1200>", "-60 \\ & HR in [2, 8, 14, 20]\") # print(valid_temperature) if 
len(valid_temperature)", "http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24) > 24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0} has more than", "\"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year = today.year # mon = today.month # day = today.day", "dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am,", "min_temp, prec24, prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt) return rec def statisticsMonthly(self, srcPathRoot, targetPathRoot):", "# items = line.split() # # try: # recs_w[int(items[5])] = line # #", "= 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss pressure.\") # .format(sid, year,", "self.batchConvert(srcRoot, bystationDir) # 08-08, qixiang subdir = \"qx0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\")", "self._loggej.info(\"Failed: {0} does't existe\".format(srcPath)) # filename = os.path.basename(srcPath) # year = int(filename[:4]) #", "if not recs.empty: mon_rec = self.calcYear(sid, year, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format(", "# self.insertHeader(targetPathRoot) # def convert(self, srcPath, targetRoot): # if not os.path.exists(srcPath): # self._loggej.info(\"Failed:", "station. 
and \\ statisics for daily and monthly.\") self._version = \"surf4hourstool.py 0.0.1\" def", "cond = \"YEAR == {0}\".format(year) recs = db.query(cond) if not recs.empty: mon_rec =", "os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath, targetPath) def stasticsMonthSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath,", "todo: do config the range of loop y_series = db[\"YEAR\"] year_begin = y_series.min()", "= 999999 valid_prec = recs.query(\"5000 > PREC24_MON >= 0\") prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt", "\"daily2020\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly2020\") yearDir = os.path.join(targetRoot, subdir, \"year2020\") print(\"statistics qx2020\")", "< HR <= 20\") prec12_am = am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec) if prec12_am_cnt ==", "= date(year, mon, day) # nextday = today # fo.seek(0) # fo.write(header) #", "= 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station", "dt.year, dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_am, prec12_am_cnt,", "(\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\",", "== 0: prec12_pm = 999999 if stat_win == \"0808\" or stat_win == \"0832\":", "db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result = [] # todo: do config the", "stasticsMonthSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\") result = []", "HR in [2, 8, 14, 20]\") if len(valid_temperature) == 4: avg_temp = valid_temperature[\"TEMP\"].mean()", "as pd from ..base.toolbase import ToolBase class Surf4HoursTool(ToolBase): \"\"\"The tool is designed to", "= \"qx0808\" 
dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir", "nextday = date(year, mon, day) # else: # strfmt = ( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\"", "= ( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year = today.year #", "= valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp", "pressure.\") # .format(sid, year, mon)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP", "PRES > 600 \\ & HR in [2, 8, 14, 20]\")) # print(valid_pressure)", "> -60\") # print(valid_temperature) if len(valid_temperature) >= 24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp =", "recs.query(\"5000 > PREC24_MON >= 0\") prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec) if prec24_year", "valid_prec.query(\"HR <=8 | HR>20\") pm_prec = valid_prec.query(\"8 < HR <= 20\") prec12_am =", "today.month # day = today.day # recs_empty = [ # strfmt.format( # sid,", "as fo: # fo.writelines(recs_w) # fo.close() # def insertHeader(self, parentDir): # header =", "# sid, year, mon, day, i, # 999999, 999999, 999999) # for i", "index_col=\"DATETIME\") result = [] # todo: do config the range of loop y_series", "\"PREC08_20\", \"C2\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def queryData(self, db,", "not os.path.exists(srcPath): # self._loggej.info(\"Failed: {0} does't existe\".format(srcPath)) # filename = os.path.basename(srcPath) # year", "\"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\",", "= os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win) def 
stasticsDailySingleStatation(self, sid, srcPath, targetPath, stat_win):", "20]\") if len(valid_temperature) == 4: avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp =", "dir of {0} is not empty and will been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True)", "1, 1) while(curDay < endDay): if stat_win == \"0808\": recs = self.queryData(db, curDay,", "with open(srcPath) as f: # recs = f.readlines() # recs = recs[1:] #", "year, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\",", "== 0: prec12_am = 999999 prec12_pm = pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec) if prec12_pm_cnt", "= datetime(dt.year, dt.month, dt.day, 0, 0, 0) \\ - timedelta(hours=df_hours) wht = whf", "if os.path.exists(targetRoot) and len(os.listdir(targetRoot)) > 0: print(\"\\nThe dir of {0} is not empty", "dt.month, dt.day)) # statistics temperature # valid_temperature = hours24.query(\"60 > TEMP > -60\")", "0 # last_rec = len(recs) - 1 # while index < last_rec: #", "int(sample[3]) # day = int(sample[4]) # today = date(year, mon, day) # nextday", "dt.day)) else: # statistics pressure # valid_pressure = hours24.query(\"1200 > PRES > 600\")", "\"HR\", # \"PRES\", \"TEMP\", \"PREC\") # filelist = sorted(os.listdir(parentDir)) # print(filelist) # for", "line in v: # items = line.split() # # try: # recs_w[int(items[5])] =", "monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 08-08, shuili subdir = \"sl0808\" dailyDir = os.path.join(targetRoot, subdir,", "\"PRES\", \"TEMP\", \"PREC\") # filelist = sorted(os.listdir(parentDir)) # print(filelist) # for item in", "self.queryData(db, curDay, 16) elif stat_win == \"2020\": recs = self.queryData(db, curDay, 4) else:", ">= 10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp 
= valid_temperature[\"MIN_TEMP\"].min() else: avg_temp", "class Surf4HoursTool(ToolBase): \"\"\"The tool is designed to convert surf files orgarnized by month", "os.path.join(targetRoot, k) # recs_w = [ # strfmt.format(k, year, mon, day, i, #", ".format(sid, year, mon)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\")", "if len(valid_temperature) == 24: # ok for 24 hours avg_temp = valid_temperature[\"TEMP\"].mean() max_temp", "\"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics sl0808\")", "item), 'r+') as fo: # recs = fo.readlines() # sample = recs[0].split() #", "> last_rec: # break # sample = recs[index].split() # sid = sample[0] #", "20]\")) if len(valid_pressure) == 4: avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres =", "do config the range of loop y_series = db[\"YEAR\"] year_begin = y_series.min() year_end", "== \"2020\": recs = self.queryData(db, curDay, 4) else: recs = self.queryData(db, curDay, -8)", "date from datetime import timedelta, datetime import pandas as pd from ..base.toolbase import", "= line # # except: # # print(\"An exception occurred\", line, items) #", "\"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\",", "\"2020\": recs = self.queryData(db, curDay, 4) else: recs = self.queryData(db, curDay, -8) #", "\\ & HR in [2, 8, 14, 20]\")) # print(valid_pressure) if len(valid_pressure) ==", "fo: # recs = fo.readlines() # sample = recs[0].split() # sid = sample[0]", "\"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\",", "\"TEMP\", \"PREC\") # filelist = sorted(os.listdir(parentDir)) # print(filelist) # for item in 
filelist:", "open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcYear(self, sid, year, recs): if", "do config the range of loop y_series = db[\"YEAR\"] endDay = date(y_series.max()+1, 1,", "len(valid_temperature) >= 24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else:", "prec24, prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt) return rec def statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot)", "import pandas as pd from ..base.toolbase import ToolBase class Surf4HoursTool(ToolBase): \"\"\"The tool is", "\"PREC_Y\", \"CNT\", \"PREC24_Y\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def", "year_end = y_series.max() + 1 for year in range(year_begin, year_end): for mon in", "== 24: # ok for 24 hours avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max()", "\"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_year, prec_cnt,", "email: import os import shutil import time from datetime import date from datetime", "> 600 \\ & HR in [2, 8, 14, 20]\")) # print(valid_pressure) if", "# dt.month, dt.day)) # statistics temperature # valid_temperature = hours24.query(\"60 > TEMP >", "\"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec in recs: # items = rec.split(\",\") #", "action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args = parser.parse_args() print(args) logger = Logger(\"./log/d2s.log\") tool.attachLogger(logger) targetRoot", "= recs.query(\"60 > AVG_TEMP > -60\") # print(valid_temperature) if len(valid_temperature) >= 24: avg_temp", "HR in [2, 8, 14, 20]\")) # print(valid_pressure) if len(valid_pressure) == 24: #", "len(valid_pressure) 
== 24: # ok for 24 hours avg_pres = valid_pressure[\"PRES\"].mean() max_pres =", "08, 14, 20]\") # .format(sid, dt.year, # dt.month, dt.day)) # statistics temperature #", "recs.query(\"1200 > AVG_PRES > 800\") if len(valid_pressure) >= 10: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres", "sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # def batchConvert(self, srcPathRoot, targetPathRoot):", "f.readlines() # recs = recs[1:] # f.close() # group = {} # strfmt", "station IDs and statisics for daily and monthly.\"\"\" def __init__(self): ToolBase.__init__(self, \"Surf4HoursTool\", \"The", "= valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999 max_pres", "> -60\") if len(valid_temperature) >= 10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp", "14, 20]\") # .format(sid, dt.year, # dt.month, dt.day)) # statistics precipation valid_prec =", "\"C2\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def queryData(self, db, dt,", "valid_pressure[\"PRES\"].min() else: valid_pressure = hours24.query((\"1200> PRES > 600 \\ & HR in [2,", "dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm,", "\"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\", \"CNT\", \"PREC24_Y\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result)", "\"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year, mon, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_mon, prec_cnt,", "min_temp = valid_temperature[\"TEMP\"].min() else: valid_temperature = hours24.query(\"60> TEMP > -60 \\ & HR", "am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec) if prec12_am_cnt == 0: prec12_am = 999999 prec12_pm =", "in 
range(year_begin, year_end): for mon in range(1, 13): cond = \"YEAR == {0}", "= self.queryData(db, curDay, -8) # if not recs.empty: day_rec = self.calcDaily(sid, curDay, recs,", "dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt)", "\"999990\": # items[7] == \"0\" # rec = strfmt.format(items[0], int(items[1]), int(items[2]), # int(items[3]),", "999999 valid_prec = recs.query(\"5000 > PREC24_MON >= 0\") prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt =", "srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath =", "for rec in recs: # items = rec.split(\",\") # if items[0] not in", "\"0832\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # def batchConvert(self, srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot) #", "print(\"processing {0}\".format(srcPath)) db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result = [] # todo:", "999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss pressure.\") #", "0: prec24_year = 999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year, avg_pres,", "# fo.close() # def insertHeader(self, parentDir): # header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( #", "10: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres =", "-60\") # print(valid_temperature) if len(valid_temperature) >= 24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max()", "= 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss pressure.\")", 
"= valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24_mon = 999999 rec", "(\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\",", "if stat_win == \"0808\": recs = self.queryData(db, curDay, 16) elif stat_win == \"2020\":", "24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else: avg_temp =", "recs = db.query(cond) if not recs.empty: mon_rec = self.calcYear(sid, year, recs) result.append(mon_rec) header", "srcPath, targetPath) def stasticsMonthSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\")", "= today.month # day = today.day # recs_empty = [ # strfmt.format( #", "valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres =", "# valid_temperature = hours24.query(\"60 > TEMP > -60\") valid_temperature = hours24.query(\"60> TEMP >", "os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) #", "curDay, 16) elif stat_win == \"2020\": recs = self.queryData(db, curDay, 4) else: recs", "08-08, qixiang subdir = \"qx0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot,", "14, 20]\") if len(valid_temperature) == 4: avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp", "PREC24_MON >= 0\") prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec) if prec24_year == 0:", "(\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" 
\"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year, mon, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp,", "\"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\") else: header", "== 4: avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: avg_temp", "month into files by station IDs and statisics for daily and monthly.\"\"\" def", "print(valid_temperature) if len(valid_temperature) >= 24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp =", "+ 1 # year_begin = 2015 # year_end = 2016 for year in", "= valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: valid_temperature = hours24.query(\"60> TEMP", "= args.target # print(srcRoot, \"-->\", targetRoot) bystationDir = os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot, bystationDir)", "# COPYRIGHT 2016 igsnrr # # MORE INFO ... 
# email: import os", "\"qx0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir =", "items[7] == \"999990\": # items[7] == \"0\" # rec = strfmt.format(items[0], int(items[1]), int(items[2]),", "# 20-20, qixiang subdir = \"qx2020\" dailyDir = os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir =", "recs[0].split() # sid = sample[0] # year = int(sample[2]) # mon = int(sample[3])", "# import sys # print(sys.argv) tool = Surf4HoursTool() import argparse from ..base.logger import", "= 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss temperature at\" # \"[02,", "min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt) return rec def", "\"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\", \"PREC24_MON\", \"CNT24\") with open(targetPath, 'w')", "for daily and monthly.\") self._version = \"surf4hourstool.py 0.0.1\" def defineArgumentParser(self, parser): # parser.add_argument(\"source\",", "# \"Station {0} miss temperature at\" # \"[02, 08, 14, 20]\") # .format(sid,", "max_pres, min_pres, avg_temp, max_temp, min_temp, prec_mon, prec_cnt, prec24_mon, prec24_cnt) return rec def statisticsYears(self,", "mon) recs = db.query(cond) if not recs.empty: mon_rec = self.calcMonthly(sid, year, mon, recs)", "20-20, qixiang subdir = \"qx2020\" dailyDir = os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir = os.path.join(targetRoot,", "{0} has more than 24 records on\").format( sid, dt.year, dt.month, dt.day)) else: #", "len(valid_prec) if prec_cnt == 0: prec_mon = 999999 valid_prec = recs.query(\"500 > PREC24", "clearDirectory(self, targetRoot): if os.path.exists(targetRoot) and len(os.listdir(targetRoot)) > 0: print(\"\\nThe dir of {0} is", "min_pres = valid_pressure[\"PRES\"].min() else: valid_pressure = hours24.query((\"1200> PRES > 600 \\ & HR", 
"int(filename[4:6]) # day = int(filename[6:8]) # recs = [] # with open(srcPath) as", "\\ .format(sid, year, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_year, prec_cnt, prec24_year, prec24_cnt)", "db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result = [] # todo: do config", "pressure.\") # .format(sid, year)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP >", "more than 24 records on\").format( sid, dt.year, dt.month, dt.day)) else: # statistics pressure", "print(valid_pressure) if len(valid_pressure) == 24: # ok for 24 hours avg_pres = valid_pressure[\"PRES\"].mean()", "20]\") # .format(sid, dt.year, # dt.month, dt.day)) # statistics temperature # valid_temperature =", "today.day # recs_empty = [ # strfmt.format( # sid, year, mon, day, i,", "fo.write(header) fo.writelines(result) fo.close() def queryData(self, db, dt, df_hours=4): whf = datetime(dt.year, dt.month, dt.day,", "range of loop y_series = db[\"YEAR\"] endDay = date(y_series.max()+1, 1, 1) curDay =", "- 1 # while index < last_rec: # if nextday == today: #", "# year = today.year # mon = today.month # day = today.day #", "day, i, # 999999, 999999, 999999) # for i in range(24)] # fo.writelines(recs_emt", "avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999", "prec24_cnt) return rec def statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item", "# strfmt.format(k, year, mon, day, i, # 999999, 999999, 999999) # for i", "-*- coding: utf-8 -*- # COPYRIGHT 2016 igsnrr # # MORE INFO ...", "# break # sample = recs[index].split() # sid = sample[0] # year =", "\"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcMonthly(self, sid, year,", "> AVG_TEMP > -60\") if len(valid_temperature) >= 
10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp =", "# mon = int(filename[4:6]) # day = int(filename[6:8]) # recs = [] #", "{1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond) return recs def calcDaily(self, sid, dt, hours24, stat_win):", "if len(valid_pressure) == 24: # ok for 24 hours avg_pres = valid_pressure[\"PRES\"].mean() max_pres", "return rec def statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in", "def statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath", "parser.add_argument(\"target\", action=\"store\", help=\"root dir for all data\") def run(self, args): # srcRoot =", "prec12_am_cnt = len(am_prec) if prec12_am_cnt == 0: prec12_am = 999999 prec12_pm = pm_prec[\"PREC\"].sum()", "CNT == 24\") prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0:", "# fo.write(header) # index = 0 # last_rec = len(recs) - 1 #", "# print(valid_pressure) if len(valid_pressure) >= 24: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres", "{0} miss pressure.\") # .format(sid, year, mon)) # statistics temperature valid_temperature = recs.query(\"60", "avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt) return", "fo.writelines(recs_w) # fo.close() # def insertHeader(self, parentDir): # header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format(", "targetPath = os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath, targetPath) def stasticsMonthSingleStatation(self, sid, srcPath, targetPath): db", "# \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year = today.year # mon = today.month # day =", "self._logger.error((\"{1}, Station 
{0} miss pressure.\") # .format(sid, year)) # statistics temperature valid_temperature =", "ok for 24 hours avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min()", "item) targetPath = os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win) def stasticsDailySingleStatation(self, sid, srcPath,", "\"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\", \"CNT\", \"PREC24_Y\", \"CNT24\") with open(targetPath,", "4) else: recs = self.queryData(db, curDay, -8) # if not recs.empty: day_rec =", "day) # else: # strfmt = ( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\")", "fo.readlines() # sample = recs[0].split() # sid = sample[0] # year = int(sample[2])", "y_series = db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() + 1 for year", "recs_w = [ # strfmt.format(k, year, mon, day, i, # 999999, 999999, 999999)", "prec24_year, prec24_cnt) return rec def clearDirectory(self, targetRoot): if os.path.exists(targetRoot) and len(os.listdir(targetRoot)) > 0:", ".format(sid, year, mon, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_mon, prec_cnt, prec24_mon, prec24_cnt)", "-m surf4hourstool\", description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args =", "elif stat_win == \"2020\": recs = self.queryData(db, curDay, 4) else: recs = self.queryData(db,", "convert surf files organized \\ by day into files organized by station. 
and", "at\" # \"[02, 08, 14, 20]\") # .format(sid, dt.year, # dt.month, dt.day)) #", "= today.day # recs_empty = [ # strfmt.format( # sid, year, mon, day,", "\" # \"Station {0} miss pressure.\") # .format(sid, year, mon)) # statistics temperature", "dt.month, dt.day)) # statistics precipation valid_prec = hours24.query(\"200 > PREC >= 0\") prec24", "# recs = [] # with open(srcPath) as f: # recs = f.readlines()", "except: # # print(\"An exception occurred\", line, items) # with open(target, 'a') as", "\"PREC24_Y\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcYear(self, sid,", "DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond) return recs def calcDaily(self, sid, dt,", "= self.queryData(db, curDay, 4) else: recs = self.queryData(db, curDay, -8) # if not", "= db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() + 1 for year in", "= valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999 max_temp", "subdir, \"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 08-08,", "queryData(self, db, dt, df_hours=4): whf = datetime(dt.year, dt.month, dt.day, 0, 0, 0) \\", "# self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot) # def convert(self, srcPath, targetRoot): # if not", "> AVG_PRES > 800\") if len(valid_pressure) >= 10: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres =", "action=\"store\", # help=\"root dir for source files\") parser.add_argument(\"target\", action=\"store\", help=\"root dir for all", "dt.month, dt.day, 0, 0, 0) \\ - timedelta(hours=df_hours) wht = whf + timedelta(hours=24)", "min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss pressure.\") 
# .format(sid,", "targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath = os.path.join(srcPathRoot,", "\"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def", "# todo: do config the range of loop y_series = db[\"YEAR\"] endDay =", "len(recs) > 0: # statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\")", "recs, stat_win) if day_rec is not None: result.append(day_rec) curDay = curDay + timedelta(days=1)", "= valid_prec.query(\"HR <=8 | HR>20\") pm_prec = valid_prec.query(\"8 < HR <= 20\") prec12_am", "self.statisticsYears(monthlyDir, yearDir) # 08-08, shuili subdir = \"sl0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\")", "break # sample = recs[index].split() # sid = sample[0] # year = int(sample[2])", "while index < last_rec: # if nextday == today: # fo.writelines(recs[index: index+24]) #", "recs = fo.readlines() # sample = recs[0].split() # sid = sample[0] # year", "temperature # valid_temperature = hours24.query(\"60 > TEMP > -60\") valid_temperature = hours24.query(\"60> TEMP", "= len(valid_prec) if prec_cnt == 0: prec_mon = 999999 valid_prec = recs.query(\"500 >", "# filelist = sorted(os.listdir(parentDir)) # print(filelist) # for item in filelist: # with", "0\") prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec) if prec24_year == 0: prec24_year =", "sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\") result = [] #", "PRES > 600\") # temporary change valid_pressure = hours24.query((\"1200> PRES > 600 \\", "= y_series.max() + 1 # year_begin = 2015 # year_end = 2016 for", "len(valid_prec) if prec24_year == 0: prec24_year = 999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\")", "# except: # # print(\"An exception 
occurred\", line, items) # with open(target, 'a')", "avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt) return rec def statisticsMonthly(self,", "# !/usr/bin/python # -*- coding: utf-8 -*- # COPYRIGHT 2016 igsnrr # #", "avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_year, prec_cnt, prec24_year, prec24_cnt) return rec def", "len(valid_temperature) == 24: # ok for 24 hours avg_temp = valid_temperature[\"TEMP\"].mean() max_temp =", "prec12_pm, prec12_pm_cnt) return rec def statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for", "> 24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0} has more than 24 records on\").format( sid,", "= recs[index].split() # sid = sample[0] # year = int(sample[2]) # mon =", "2015 # year_end = 2016 for year in range(year_begin, year_end): cond = \"YEAR", "filelist: # srcPath = os.path.join(srcPathRoot, item) # print(srcPath) # self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot)", "valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: valid_temperature = hours24.query(\"60> TEMP > -60 \\ &", "{0}\".format(srcPath)) db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result = [] # todo: do", "recs = db.query(cond) if not recs.empty: mon_rec = self.calcMonthly(sid, year, mon, recs) result.append(mon_rec)", "on\").format( sid, dt.year, dt.month, dt.day)) else: # statistics pressure # valid_pressure = hours24.query(\"1200", "\"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\", \"CNT\", \"PREC24_Y\", \"CNT24\") with", "qixiang subdir = \"qx2020\" dailyDir = os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir = os.path.join(targetRoot, subdir,", "24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0} has more than 24 
records on\").format( sid, dt.year,", "= y_series.min() year_end = y_series.max() + 1 # year_begin = 2015 # year_end", "recs def calcDaily(self, sid, dt, hours24, stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24) >", "prec12_am_cnt == 0: prec12_am = 999999 prec12_pm = pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec) if", "{0} miss pressure at\" # \"[02, 08, 14, 20]\") # .format(sid, dt.year, #", "<= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond) return recs def calcDaily(self, sid, dt, hours24,", "fo.flush() # fo.close() def statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for", "min_temp = valid_temperature[\"TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp = 999999", "= os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath, targetPath) def stasticsYearSingleStatation(self, sid,", "= hours24.query(\"60 > TEMP > -60\") valid_temperature = hours24.query(\"60> TEMP > -60 \\", "= 999999 # self._logger.error((\"{1}, Station {0} miss pressure.\") # .format(sid, year)) # statistics", "mon = today.month # day = today.day # recs_empty = [ # strfmt.format(", "= os.path.join(targetRoot, subdir, \"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir)", "try: # recs_w[int(items[5])] = line # # except: # # print(\"An exception occurred\",", "valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_mon = 999999 valid_prec =", "files by station IDs and statisics for daily and monthly.\"\"\" def __init__(self): ToolBase.__init__(self,", "stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24) > 24): 
self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0} has", "prec24_year == 0: prec24_year = 999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid,", "# items = rec.split(\",\") # if items[0] not in group: # group[items[0]] =", "\"surf4hourstool.py 0.0.1\" def defineArgumentParser(self, parser): # parser.add_argument(\"source\", action=\"store\", # help=\"root dir for source", "= 999999 if stat_win == \"0808\" or stat_win == \"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\"", "= hours24.query((\"1200> PRES > 600 \\ & HR in [2, 8, 14, 20]\"))", "statistics precipation valid_prec = recs.query(\"500 > PREC24 >= 0\") prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt", "else: rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres,", "defineArgumentParser(self, parser): # parser.add_argument(\"source\", action=\"store\", # help=\"root dir for source files\") parser.add_argument(\"target\", action=\"store\",", "= self.calcMonthly(sid, year, mon, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\",", "if len(valid_pressure) >= 10: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min()", "recs): if len(recs) > 0: # statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES", "prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt) else: rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"),", "if len(valid_pressure) >= 24: avg_pres = 
valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min()", "= valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec) if prec24_year == 0: prec24_year = 999999 rec", "monthly.\"\"\" def __init__(self): ToolBase.__init__(self, \"Surf4HoursTool\", \"The Surf4Hours Tool convert surf files organized \\", "day = int(sample[4]) # nextday = date(year, mon, day) # else: # strfmt", "dt.month, dt.day)) else: # statistics pressure # valid_pressure = hours24.query(\"1200 > PRES >", "prec_cnt, prec24_year, prec24_cnt) return rec def clearDirectory(self, targetRoot): if os.path.exists(targetRoot) and len(os.listdir(targetRoot)) >", "valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp =", "= os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win) def stasticsDailySingleStatation(self,", "valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24 = 999999 am_prec =", "datetime(dt.year, dt.month, dt.day, 0, 0, 0) \\ - timedelta(hours=df_hours) wht = whf +", "calcMonthly(self, sid, year, mon, recs): if len(recs) > 0: # statistics pressure valid_pressure", "1 # while index < last_rec: # if nextday == today: # fo.writelines(recs[index:", "filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath, targetPath) def", "hours24.query((\"1200> PRES > 600 \\ & HR in [2, 8, 14, 20]\")) if", "precipation valid_prec = hours24.query(\"200 > PREC >= 0\") prec24 = valid_prec[\"PREC\"].sum() prec24_cnt =", "# def convert(self, srcPath, targetRoot): # if not os.path.exists(srcPath): # self._loggej.info(\"Failed: {0} does't", "self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss temperature\") # .format(sid, year, mon,)) # 
statistics", "os.path.basename(srcPath) # year = int(filename[:4]) # mon = int(filename[4:6]) # day = int(filename[6:8])", "# self._logger.error((\"{1}, Station {0} miss pressure.\") # .format(sid, year)) # statistics temperature valid_temperature", "min_pres = 999999 # self._logger.error((\"{1}, Station {0} miss pressure.\") # .format(sid, year)) #", "1) curDay = date(y_series.min(), 1, 1) while(curDay < endDay): if stat_win == \"0808\":", "stasticsDailySingleStatation(self, sid, srcPath, targetPath, stat_win): print(\"processing {0}\".format(srcPath)) db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\")", "valid_pressure[\"PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d},", "in filelist: # srcPath = os.path.join(srcPathRoot, item) # print(srcPath) # self.convert(srcPath, targetPathRoot) #", "= 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss temperature\") # .format(sid, year,", "fo.write(header) # index = 0 # last_rec = len(recs) - 1 # while", "0: prec12_pm = 999999 if stat_win == \"0808\" or stat_win == \"0832\": rec", "not os.path.exists(targetRoot): os.makedirs(targetRoot) if __name__ == \"__main__\": # testing code # import sys", "srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win) def", "year_begin = y_series.min() year_end = y_series.max() + 1 # year_begin = 2015 #", "self._version = \"surf4hourstool.py 0.0.1\" def defineArgumentParser(self, parser): # parser.add_argument(\"source\", action=\"store\", # help=\"root dir", "prec24_mon = 999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year, mon, avg_pres,", "\"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year, mon, avg_pres, 
max_pres, min_pres, avg_temp, max_temp, min_temp, prec_mon,", "if nextday == today: # fo.writelines(recs[index: index+24]) # index = index + 24", "\"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\", \"PREC24_MON\", \"CNT24\") with", "999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}, Station {0} miss pressure.\")", "items = rec.split(\",\") # if items[0] not in group: # group[items[0]] = []", "len(pm_prec) if prec12_pm_cnt == 0: prec12_pm = 999999 if stat_win == \"0808\" or", "prec_cnt = len(valid_prec) if prec_cnt == 0: prec_year = 999999 valid_prec = recs.query(\"5000", "os.path.exists(targetRoot): os.makedirs(targetRoot) if __name__ == \"__main__\": # testing code # import sys #", "# MORE INFO ... # email: import os import shutil import time from", "self.clearDirectory(targetPathRoot) # filelist = sorted(os.listdir(srcPathRoot)) # for item in filelist: # srcPath =", "year_begin = y_series.min() year_end = y_series.max() + 1 for year in range(year_begin, year_end):", "in [2, 8, 14, 20]\") if len(valid_temperature) == 4: avg_temp = valid_temperature[\"TEMP\"].mean() max_temp", "print(args) logger = Logger(\"./log/d2s.log\") tool.attachLogger(logger) targetRoot = args.target tool.run(args) else: print(\"loading day2stationtool module\")", "avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt) else: rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\"", "statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") # print(valid_temperature) if len(valid_temperature)", "0: prec_year = 999999 valid_prec = recs.query(\"5000 > PREC24_MON >= 0\") prec24_year =", "by month into files by station IDs and statisics for daily and monthly.\"\"\"", "# if items[0] not in group: # group[items[0]] = [] # if items[7]", "strfmt.format(k, year, mon, day, i, # 999999, 999999, 999999) # for i in", "(len(hours24) > 
24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0} has more than 24 records on\").format(", "\"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\", \"CNT\", \"PREC24_Y\", \"CNT24\") with open(targetPath, 'w') as", "run(self, args): # srcRoot = args.source targetRoot = args.target # print(srcRoot, \"-->\", targetRoot)", "len(valid_pressure) >= 24: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else:", "= len(valid_prec) if prec24_cnt == 0: prec24 = 999999 am_prec = valid_prec.query(\"HR <=8", "else: avg_pres = 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \"", "self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0} has more than 24 records on\").format( sid, dt.year, dt.month,", "files orgarnized by month into files by station IDs and statisics for daily", "= 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}, Station {0} miss", "= int(filename[6:8]) # recs = [] # with open(srcPath) as f: # recs", "statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") if len(valid_pressure) >= 10:", "= int(filename[4:6]) # day = int(filename[6:8]) # recs = [] # with open(srcPath)", "= os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir,", "# \"MON\", \"DAY\", \"HR\", # \"PRES\", \"TEMP\", \"PREC\") # filelist = sorted(os.listdir(parentDir)) #", "group = {} # strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for", "hours avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: valid_pressure =", "\"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" 
\"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres,", "skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result = [] # todo: do config the range of", "argparse from ..base.logger import Logger parser = argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool Usage Guide\",", "db.query(cond) return recs def calcDaily(self, sid, dt, hours24, stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if", "= os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath, targetPath) def stasticsYearSingleStatation(self, sid, srcPath, targetPath): db =", "self.statisticsYears(monthlyDir, yearDir) # 20-20, qixiang subdir = \"qx2020\" dailyDir = os.path.join(targetRoot, subdir, \"daily2020\")", "mon, recs): if len(recs) > 0: # statistics pressure valid_pressure = recs.query(\"1200 >", "pandas as pd from ..base.toolbase import ToolBase class Surf4HoursTool(ToolBase): \"\"\"The tool is designed", "yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir,", "recs = self.queryData(db, curDay, 4) else: recs = self.queryData(db, curDay, -8) # if", "else: avg_temp = 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \"", "\"Station {0} miss pressure.\") # .format(sid, year, mon)) # statistics temperature valid_temperature =", "= rec.split(\",\") # if items[0] not in group: # group[items[0]] = [] #", "Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args = parser.parse_args() print(args) logger", "\\ & HR in [2, 8, 14, 20]\") if len(valid_temperature) == 4: avg_temp", 
"\"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 20-20, qixiang", "= int(sample[2]) # mon = int(sample[3]) # day = int(sample[4]) # today =", "= f.readlines() # recs = recs[1:] # f.close() # group = {} #", "surf4hourstool\", description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args = parser.parse_args()", "avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: valid_temperature = hours24.query(\"60>", "srcPath, targetPath, stat_win): print(\"processing {0}\".format(srcPath)) db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATETIME\") result =", "= valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: avg_temp = 999999 max_temp", "+ 1 for year in range(year_begin, year_end): for mon in range(1, 13): cond", "= (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec in recs: # items", "valid_pressure = hours24.query(\"1200 > PRES > 600\") # temporary change valid_pressure = hours24.query((\"1200>", ".format(sid, year)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") if", "srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result = [] # todo: do", "pressure at\" # \"[02, 08, 14, 20]\") # .format(sid, dt.year, # dt.month, dt.day))", "\"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics qx0808\")", "subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, 
subdir, \"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir,", "600 \\ & HR in [2, 8, 14, 20]\")) # print(valid_pressure) if len(valid_pressure)", "= \"YEAR == {0}\".format(year) recs = db.query(cond) if not recs.empty: mon_rec = self.calcYear(sid,", "= 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss pressure at\" # \"[02,", "insertHeader(self, parentDir): # header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\", \"YEAR\", #", "srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath = os.path.join(srcPathRoot,", "timedelta(days=1) # fo.flush() # fo.close() def statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist =", "= hours24.query(\"200 > PREC >= 0\") prec24 = valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec) if", "config the range of loop y_series = db[\"YEAR\"] year_begin = y_series.min() year_end =", "os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\")", "files organized by station. 
and \\ statisics for daily and monthly.\") self._version =", "# todo: do config the range of loop y_series = db[\"YEAR\"] year_begin =", "monthly.\") self._version = \"surf4hourstool.py 0.0.1\" def defineArgumentParser(self, parser): # parser.add_argument(\"source\", action=\"store\", # help=\"root", "curDay = date(y_series.min(), 1, 1) while(curDay < endDay): if stat_win == \"0808\": recs", "Surf4HoursTool(ToolBase): \"\"\"The tool is designed to convert surf files orgarnized by month into", "not recs.empty: mon_rec = self.calcMonthly(sid, year, mon, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format(", "fo.write(header) fo.writelines(result) fo.close() def calcYear(self, sid, year, recs): if len(recs) > 0: #", ".format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt,", "max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: avg_temp = 999999 max_temp = 999999", "been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1) if not os.path.exists(targetRoot): os.makedirs(targetRoot) if __name__ ==", "\"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\") else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\",", "print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 08-08, shuili subdir", "== 24\") prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24_mon", "item in filelist: # srcPath = os.path.join(srcPathRoot, item) # print(srcPath) # self.convert(srcPath, targetPathRoot)", "\"PREC24\", \"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\") with open(targetPath, 'w') as fo: 
fo.write(header) fo.writelines(result)", "prec24_cnt = len(valid_prec) if prec24_year == 0: prec24_year = 999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\"", "index = index + 24 # if index > last_rec: # break #", "\"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\", \"PREC24_MON\", \"CNT24\") with open(targetPath, 'w') as", "i, # 999999, 999999, 999999) # for i in range(24)] # for line", "recs.query(\"500 > PREC24 >= 0 & CNT == 24\") prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt", "shutil import time from datetime import date from datetime import timedelta, datetime import", "valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") # print(valid_pressure) if len(valid_pressure) >= 24:", "= 999999 prec12_pm = pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec) if prec12_pm_cnt == 0: prec12_pm", "yearDir) # 20-20, qixiang subdir = \"qx2020\" dailyDir = os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir", "# # except: # # print(\"An exception occurred\", line, items) # with open(target,", "| HR>20\") pm_prec = valid_prec.query(\"8 < HR <= 20\") prec12_am = am_prec[\"PREC\"].sum() prec12_am_cnt", "valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1},", "does't existe\".format(srcPath)) # filename = os.path.basename(srcPath) # year = int(filename[:4]) # mon =", "prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt) else: rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid,", "== {1}\".format(year, mon) recs = db.query(cond) if not recs.empty: mon_rec = self.calcMonthly(sid, year,", "pressure # valid_pressure = hours24.query(\"1200 > PRES > 600\") # temporary change valid_pressure", "if len(valid_temperature) >= 24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() 
min_temp = valid_temperature[\"MIN_TEMP\"].min()", "occurred\", line, items) # with open(target, 'a') as fo: # fo.writelines(recs_w) # fo.close()", "{0} miss pressure.\") # .format(sid, year)) # statistics temperature valid_temperature = recs.query(\"60 >", "999999, 999999, 999999) # for i in range(24)] # for line in v:", "fo.close() def calcYear(self, sid, year, recs): if len(recs) > 0: # statistics pressure", "time.sleep(1) if not os.path.exists(targetRoot): os.makedirs(targetRoot) if __name__ == \"__main__\": # testing code #", "subdir, \"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 20-20,", "in range(24)] # for line in v: # items = line.split() # #", "testing code # import sys # print(sys.argv) tool = Surf4HoursTool() import argparse from", "else: # statistics pressure # valid_pressure = hours24.query(\"1200 > PRES > 600\") #", "= recs.query(\"1200 > AVG_PRES > 800\") if len(valid_pressure) >= 10: avg_pres = valid_pressure[\"AVG_PRES\"].mean()", "10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else: avg_temp =", "self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 20-20, qixiang subdir = \"qx2020\"", "# today = date(year, mon, day) # nextday = today # fo.seek(0) #", "item) # print(srcPath) # self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot) # def convert(self, srcPath, targetRoot):", "= int(sample[4]) # today = date(year, mon, day) # nextday = today #", "# print(srcRoot, \"-->\", targetRoot) bystationDir = os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot, bystationDir) # 08-08,", "self.statisticsDaily(bystationDir, dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir) 
self.statisticsYears(monthlyDir, yearDir) # def batchConvert(self, srcPathRoot, targetPathRoot): #", "daily and monthly.\"\"\" def __init__(self): ToolBase.__init__(self, \"Surf4HoursTool\", \"The Surf4Hours Tool convert surf files", "= [ # strfmt.format(k, year, mon, day, i, # 999999, 999999, 999999) #", "24: # ok for 24 hours avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres", "if index > last_rec: # break # sample = recs[index].split() # sid =", "shutil.rmtree(targetRoot, True) time.sleep(1) if not os.path.exists(targetRoot): os.makedirs(targetRoot) if __name__ == \"__main__\": # testing", "year, mon, recs): if len(recs) > 0: # statistics pressure valid_pressure = recs.query(\"1200", "batchConvert(self, srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot) # filelist = sorted(os.listdir(srcPathRoot)) # for item in", "{} # strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec in", "+ timedelta(days=1) # fo.flush() # fo.close() def statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist", "= pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec) if prec12_pm_cnt == 0: prec12_pm = 999999 if", "# f.close() # group = {} # strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" #", "# today = today + timedelta(days=1) # fo.flush() # fo.close() def statisticsDaily(self, srcPathRoot,", "> PREC24_MON >= 0\") prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec) if prec24_year ==", "# statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") # print(valid_pressure) if", "\\ statisics for daily and monthly.\") self._version = \"surf4hourstool.py 0.0.1\" def defineArgumentParser(self, parser):", "# index = 0 # last_rec = len(recs) - 1 # while index", "valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec) if 
prec24_cnt == 0: prec24_mon = 999999 rec =", "header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\",", "action=\"store\", help=\"root dir for all data\") def run(self, args): # srcRoot = args.source", "TEMP > -60\") valid_temperature = hours24.query(\"60> TEMP > -60 \\ & HR in", "self.stasticsYearSingleStatation(item, srcPath, targetPath) def stasticsYearSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True)", "for item in filelist: # srcPath = os.path.join(srcPathRoot, item) # print(srcPath) # self.convert(srcPath,", "{0} does't existe\".format(srcPath)) # filename = os.path.basename(srcPath) # year = int(filename[:4]) # mon", "dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_am,", "\"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\") else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\"", "HR in [2, 8, 14, 20]\")) if len(valid_pressure) == 4: avg_pres = valid_pressure[\"PRES\"].mean()", "& HR in [2, 8, 14, 20]\") # print(valid_temperature) if len(valid_temperature) == 24:", "targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result = [] # todo: do config", "if items[7] == \"999990\": # items[7] == \"0\" # rec = strfmt.format(items[0], int(items[1]),", "yearDir) # 08-08, shuili subdir = \"sl0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir", "help=\"root dir for all data\") def run(self, args): # srcRoot = args.source targetRoot", "<= 20\") prec12_am = am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec) if prec12_am_cnt == 0: prec12_am", "in recs: # items = rec.split(\",\") # if items[0] not in group: #", 
"valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres =", "= db[\"YEAR\"] endDay = date(y_series.max()+1, 1, 1) curDay = date(y_series.min(), 1, 1) while(curDay", "PREC24 >= 0 & CNT == 24\") prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec)", "# try: # recs_w[int(items[5])] = line # # except: # # print(\"An exception", "\"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\") else:", "bystationDir) # 08-08, qixiang subdir = \"qx0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir", "os import shutil import time from datetime import date from datetime import timedelta,", "\"Surf4HoursTool\", \"The Surf4Hours Tool convert surf files organized \\ by day into files", "stat_win) def stasticsDailySingleStatation(self, sid, srcPath, targetPath, stat_win): print(\"processing {0}\".format(srcPath)) db = pd.read_table(srcPath, skip_blank_lines=True,", "avg_temp, max_temp, min_temp, prec_mon, prec_cnt, prec24_mon, prec24_cnt) return rec def statisticsYears(self, srcPathRoot, targetPathRoot):", "today = today + timedelta(days=1) # fo.flush() # fo.close() def statisticsDaily(self, srcPathRoot, targetPathRoot,", "\"-->\", targetRoot) bystationDir = os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot, bystationDir) # 08-08, qixiang subdir", "# statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") # print(valid_temperature) if", "with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcYear(self, sid, year, recs):", "# year = int(sample[2]) # mon = int(sample[3]) # day = int(sample[4]) #", "if len(recs) > 0: # statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES >", "last_rec: # if nextday == today: # fo.writelines(recs[index: index+24]) # index = index", "open(targetPath, 'w') 
as fo: fo.write(header) fo.writelines(result) fo.close() def queryData(self, db, dt, df_hours=4): whf", "with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcMonthly(self, sid, year, mon,", "# if not recs.empty: day_rec = self.calcDaily(sid, curDay, recs, stat_win) if day_rec is", "year_end): cond = \"YEAR == {0}\".format(year) recs = db.query(cond) if not recs.empty: mon_rec", "= 999999 min_temp = 999999 # self._logger.error((\"{1}, Station {0} miss temperature\") # .format(sid,", "# fo.close() def statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item", "for year in range(year_begin, year_end): cond = \"YEAR == {0}\".format(year) recs = db.query(cond)", "in v: # items = line.split() # # try: # recs_w[int(items[5])] = line", "# fo.flush() # fo.close() def statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot)", "= len(valid_prec) if prec24_cnt == 0: prec24_mon = 999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\"", "999999 if stat_win == \"0808\" or stat_win == \"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\"", "df_hours=4): whf = datetime(dt.year, dt.month, dt.day, 0, 0, 0) \\ - timedelta(hours=df_hours) wht", "def __init__(self): ToolBase.__init__(self, \"Surf4HoursTool\", \"The Surf4Hours Tool convert surf files organized \\ by", "of {0} is not empty and will been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1)", "mon, day, i, # 999999, 999999, 999999) # for i in range(24)] #", "sid, year, mon, day, i, # 999999, 999999, 999999) # for i in", "loop y_series = db[\"YEAR\"] endDay = date(y_series.max()+1, 1, 1) curDay = date(y_series.min(), 1,", "header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" 
\"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\",", "curDay, 4) else: recs = self.queryData(db, curDay, -8) # if not recs.empty: day_rec", "timedelta, datetime import pandas as pd from ..base.toolbase import ToolBase class Surf4HoursTool(ToolBase): \"\"\"The", "mon, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\",", "avg_pres = 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}, Station {0}", "\"0808\" or stat_win == \"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\",", "files\") parser.add_argument(\"target\", action=\"store\", help=\"root dir for all data\") def run(self, args): # srcRoot", "result.append(day_rec) curDay = curDay + timedelta(days=1) if stat_win == \"0808\" or stat_win ==", "= parser.parse_args() print(args) logger = Logger(\"./log/d2s.log\") tool.attachLogger(logger) targetRoot = args.target tool.run(args) else: print(\"loading", "rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year, mon, avg_pres, max_pres, min_pres, avg_temp,", "else: avg_temp = 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}, \"", "targetPath) def stasticsYearSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result =", "self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss pressure at\" # \"[02, 08, 14, 20]\")", "group: # group[items[0]] = [] # if items[7] == \"999990\": # items[7] ==", "and monthly.\"\"\" def __init__(self): ToolBase.__init__(self, 
\"Surf4HoursTool\", \"The Surf4Hours Tool convert surf files organized", "header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\", \"YEAR\", # \"MON\", \"DAY\", \"HR\",", "# print(valid_temperature) if len(valid_temperature) >= 24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp", "999999 am_prec = valid_prec.query(\"HR <=8 | HR>20\") pm_prec = valid_prec.query(\"8 < HR <=", "\\ - timedelta(hours=df_hours) wht = whf + timedelta(hours=24) cond = \"{0} < DATETIME", "float(items[7])) # group[items[0]].append(rec) # for k, v in group.items(): # target = os.path.join(targetRoot,", "avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: avg_pres = 999999", "date(year, mon, day) # nextday = today # fo.seek(0) # fo.write(header) # index", "temperature at\" # \"[02, 08, 14, 20]\") # .format(sid, dt.year, # dt.month, dt.day))", "endDay): if stat_win == \"0808\": recs = self.queryData(db, curDay, 16) elif stat_win ==", "import shutil import time from datetime import date from datetime import timedelta, datetime", "return rec def statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in", "statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist: srcPath =", "= recs.query(\"500 > PREC24 >= 0\") prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec) if", "0) \\ - timedelta(hours=df_hours) wht = whf + timedelta(hours=24) cond = \"{0} <", "fo: fo.write(header) fo.writelines(result) fo.close() def calcMonthly(self, sid, year, mon, recs): if len(recs) >", "# statistics precipation valid_prec = recs.query(\"500 > PREC24 >= 0\") prec_mon = valid_prec[\"PREC24\"].sum()", "14, 20]\")) if len(valid_pressure) == 4: 
avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres", "mon, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_mon, prec_cnt, prec24_mon, prec24_cnt) return rec", "1) while(curDay < endDay): if stat_win == \"0808\": recs = self.queryData(db, curDay, 16)", "hours24, stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24) > 24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0}", "# valid_pressure = hours24.query(\"1200 > PRES > 600\") # temporary change valid_pressure =", "db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() + 1 for year in range(year_begin,", "= 2016 for year in range(year_begin, year_end): cond = \"YEAR == {0}\".format(year) recs", "+ timedelta(hours=24) cond = \"{0} < DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond)", "items[7] == \"0\" # rec = strfmt.format(items[0], int(items[1]), int(items[2]), # int(items[3]), int(items[4]), #", "== today: # fo.writelines(recs[index: index+24]) # index = index + 24 # if", "nextday == today: # fo.writelines(recs[index: index+24]) # index = index + 24 #", "> 0: # statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") if", "= self.queryData(db, curDay, 16) elif stat_win == \"2020\": recs = self.queryData(db, curDay, 4)", "= os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath, targetPath) def stasticsMonthSingleStatation(self, sid, srcPath, targetPath): db =", "= valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: valid_pressure = hours24.query((\"1200> PRES", "if prec_cnt == 0: prec_mon = 999999 valid_prec = recs.query(\"500 > PREC24 >=", "print(valid_temperature) if len(valid_temperature) == 24: # ok for 24 hours avg_temp = valid_temperature[\"TEMP\"].mean()", "# print(sys.argv) tool = Surf4HoursTool() import 
argparse from ..base.logger import Logger parser =", "mon = int(sample[3]) # day = int(sample[4]) # today = date(year, mon, day)", "db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() + 1 # year_begin = 2015", "[] # with open(srcPath) as f: # recs = f.readlines() # recs =", "overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1) if not os.path.exists(targetRoot): os.makedirs(targetRoot) if __name__ == \"__main__\":", "valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") if len(valid_temperature) >= 10: avg_temp =", "# self._logger.error((\"{1}, Station {0} miss temperature\") # .format(sid, year)) # statistics precipation valid_prec", "coding: utf-8 -*- # COPYRIGHT 2016 igsnrr # # MORE INFO ... #", "os.path.join(targetRoot, subdir, \"monthly2020\") yearDir = os.path.join(targetRoot, subdir, \"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\")", "8, 14, 20]\")) # print(valid_pressure) if len(valid_pressure) == 24: # ok for 24", "item) self.stasticsMonthSingleStatation(item, srcPath, targetPath) def stasticsMonthSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True,", "int(items[2]), # int(items[3]), int(items[4]), # float(items[5]), float(items[6]), # float(items[7])) # group[items[0]].append(rec) # for", "sid = sample[0] # year = int(sample[2]) # mon = int(sample[3]) # day", "ToolBase class Surf4HoursTool(ToolBase): \"\"\"The tool is designed to convert surf files orgarnized by", "valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_year = 999999 valid_prec =", "version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args = parser.parse_args() print(args) logger = Logger(\"./log/d2s.log\") tool.attachLogger(logger) targetRoot =", "recs_empty = [ # strfmt.format( # sid, year, mon, day, i, # 999999,", "= len(am_prec) if prec12_am_cnt == 0: prec12_am = 999999 prec12_pm = pm_prec[\"PREC\"].sum() 
prec12_pm_cnt", "24 # if index > last_rec: # break # sample = recs[index].split() #", "dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt)", "dt.day)) # statistics temperature # valid_temperature = hours24.query(\"60 > TEMP > -60\") valid_temperature", "MORE INFO ... # email: import os import shutil import time from datetime", "qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 08-08, shuili subdir =", "not in group: # group[items[0]] = [] # if items[7] == \"999990\": #", "-8) # if not recs.empty: day_rec = self.calcDaily(sid, curDay, recs, stat_win) if day_rec", "= 999999 # self._logger.error((\"{1}, Station {0} miss temperature\") # .format(sid, year)) # statistics", "= valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: valid_temperature = hours24.query(\"60> TEMP > -60 \\", "strfmt.format( # sid, year, mon, day, i, # 999999, 999999, 999999) # for", "subdir = \"qx0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\")", "0: prec24_mon = 999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year, mon,", "recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\",", "> AVG_PRES > 800\") # print(valid_pressure) if len(valid_pressure) >= 24: avg_pres = valid_pressure[\"AVG_PRES\"].mean()", "if not os.path.exists(targetRoot): os.makedirs(targetRoot) if __name__ == \"__main__\": # testing code # import", "# self.batchConvert(srcRoot, bystationDir) # 08-08, qixiang subdir = \"qx0808\" dailyDir = os.path.join(targetRoot, subdir,", 
"avg_pres = 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" #", "999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss temperature at\"", "\"PREC\") # filelist = sorted(os.listdir(parentDir)) # print(filelist) # for item in filelist: #", "prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24_mon = 999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\"", "def queryData(self, db, dt, df_hours=4): whf = datetime(dt.year, dt.month, dt.day, 0, 0, 0)", "# statistics precipation valid_prec = hours24.query(\"200 > PREC >= 0\") prec24 = valid_prec[\"PREC\"].sum()", "import argparse from ..base.logger import Logger parser = argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool Usage", "than 24 records on\").format( sid, dt.year, dt.month, dt.day)) else: # statistics pressure #", "index_col=\"DATE\") result = [] # todo: do config the range of loop y_series", "valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: valid_pressure = hours24.query((\"1200> PRES > 600 \\ &", "# print(srcPath) # self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot) # def convert(self, srcPath, targetRoot): #", "item in filelist: # with open(os.path.join(parentDir, item), 'r+') as fo: # recs =", "= valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0: prec_year = 999999 valid_prec", "= args.source targetRoot = args.target # print(srcRoot, \"-->\", targetRoot) bystationDir = os.path.join(targetRoot, \"bystation\")", "\"Station {0} miss temperature\") # .format(sid, year, mon,)) # statistics precipation valid_prec =", "> TEMP > -60\") valid_temperature = hours24.query(\"60> TEMP > -60 \\ & HR", "sample[0] # year = int(sample[2]) # mon = int(sample[3]) # day = int(sample[4])", "= os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = 
os.path.join(targetRoot, subdir,", "# .format(sid, year, mon,)) # statistics precipation valid_prec = recs.query(\"500 > PREC24 >=", "\\ & HR in [2, 8, 14, 20]\") # print(valid_temperature) if len(valid_temperature) ==", "prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24_mon = 999999", "parser): # parser.add_argument(\"source\", action=\"store\", # help=\"root dir for source files\") parser.add_argument(\"target\", action=\"store\", help=\"root", "targetRoot): if os.path.exists(targetRoot) and len(os.listdir(targetRoot)) > 0: print(\"\\nThe dir of {0} is not", "by station IDs and statisics for daily and monthly.\"\"\" def __init__(self): ToolBase.__init__(self, \"Surf4HoursTool\",", "\"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\") else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\",", "prec24_year = 999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year, avg_pres, max_pres,", "valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: valid_pressure = hours24.query((\"1200> PRES >", "year)) # statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") if len(valid_temperature)", "= os.listdir(srcPathRoot) for item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot,", "year in range(year_begin, year_end): for mon in range(1, 13): cond = \"YEAR ==", "import ToolBase class Surf4HoursTool(ToolBase): \"\"\"The tool is designed to convert surf files orgarnized", "14, 20]\")) # print(valid_pressure) if len(valid_pressure) == 24: # ok for 24 hours", "= \"surf4hourstool.py 0.0.1\" def defineArgumentParser(self, parser): # parser.add_argument(\"source\", action=\"store\", # help=\"root dir for", "# 
year_end = 2016 for year in range(year_begin, year_end): cond = \"YEAR ==", "stat_win == \"0808\": recs = self.queryData(db, curDay, 16) elif stat_win == \"2020\": recs", "{0}\".format(year) recs = db.query(cond) if not recs.empty: mon_rec = self.calcYear(sid, year, recs) result.append(mon_rec)", "targetPath) def stasticsMonthSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\") result", "= 2015 # year_end = 2016 for year in range(year_begin, year_end): cond =", "def convert(self, srcPath, targetRoot): # if not os.path.exists(srcPath): # self._loggej.info(\"Failed: {0} does't existe\".format(srcPath))", "# self._loggej.info(\"Failed: {0} does't existe\".format(srcPath)) # filename = os.path.basename(srcPath) # year = int(filename[:4])", "convert(self, srcPath, targetRoot): # if not os.path.exists(srcPath): # self._loggej.info(\"Failed: {0} does't existe\".format(srcPath)) #", "\"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\", \"PREC24_MON\", \"CNT24\") with open(targetPath,", "prec_cnt == 0: prec_year = 999999 valid_prec = recs.query(\"5000 > PREC24_MON >= 0\")", "= int(sample[4]) # nextday = date(year, mon, day) # else: # strfmt =", "\"SID\", \"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\", \"PREC24_MON\", \"CNT24\")", "targetPathRoot): # self.clearDirectory(targetPathRoot) # filelist = sorted(os.listdir(srcPathRoot)) # for item in filelist: #", "filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win)", "2016 igsnrr # # MORE INFO ... 
# email: import os import shutil", "= recs.query(\"1200 > AVG_PRES > 800\") # print(valid_pressure) if len(valid_pressure) >= 24: avg_pres", "== \"0808\": recs = self.queryData(db, curDay, 16) elif stat_win == \"2020\": recs =", "qixiang subdir = \"qx0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir,", "# year_begin = 2015 # year_end = 2016 for year in range(year_begin, year_end):", "= recs[1:] # f.close() # group = {} # strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" #", "rec def statisticsMonthly(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in filelist:", "prec_mon = 999999 valid_prec = recs.query(\"500 > PREC24 >= 0 & CNT ==", "os.makedirs(targetRoot) if __name__ == \"__main__\": # testing code # import sys # print(sys.argv)", "600 \\ & HR in [2, 8, 14, 20]\")) if len(valid_pressure) == 4:", "tool = Surf4HoursTool() import argparse from ..base.logger import Logger parser = argparse.ArgumentParser(prog=\"python -m", "> -60 \\ & HR in [2, 8, 14, 20]\") if len(valid_temperature) ==", "= 999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year, mon, avg_pres, max_pres,", "fo.writelines(recs_emt # today = today + timedelta(days=1) # fo.flush() # fo.close() def statisticsDaily(self,", "max_temp, min_temp, prec24, prec24_cnt, prec12_am, prec12_am_cnt, prec12_pm, prec12_pm_cnt) return rec def statisticsMonthly(self, srcPathRoot,", "recs = recs[1:] # f.close() # group = {} # strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\"", "precipation valid_prec = recs.query(\"5000 > PREC_MON >= 0\") prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt =", "hours24.query((\"1200> PRES > 600 \\ & HR in [2, 8, 14, 20]\")) #", "= len(recs) - 1 # while index < last_rec: # if nextday ==", "stat_win): self.clearDirectory(targetPathRoot) filelist = 
os.listdir(srcPathRoot) for item in filelist: srcPath = os.path.join(srcPathRoot, item)", "> 800\") # print(valid_pressure) if len(valid_pressure) >= 24: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres =", "in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath, targetPath)", "open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcMonthly(self, sid, year, mon, recs):", "fo.close() def queryData(self, db, dt, df_hours=4): whf = datetime(dt.year, dt.month, dt.day, 0, 0,", "fo.close() def calcMonthly(self, sid, year, mon, recs): if len(recs) > 0: # statistics", "max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}, Station {0} miss pressure.\") #", "# fo.writelines(recs[index: index+24]) # index = index + 24 # if index >", "year, mon, day, i, # 999999, 999999, 999999) # for i in range(24)]", "today + timedelta(days=1) # fo.flush() # fo.close() def statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot)", "if not recs.empty: mon_rec = self.calcMonthly(sid, year, mon, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>4}{:>10}{:>10}{:>10}{:>10}\"", "for source files\") parser.add_argument(\"target\", action=\"store\", help=\"root dir for all data\") def run(self, args):", "def stasticsMonthSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\") result =", "# statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") if len(valid_pressure) >=", "# nextday = today # fo.seek(0) # fo.write(header) # index = 0 #", "= (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\", \"CNT\",", "\"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\", 
\"PREC24_MON\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result)", "len(valid_temperature) == 4: avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else:", "whf = datetime(dt.year, dt.month, dt.day, 0, 0, 0) \\ - timedelta(hours=df_hours) wht =", "line, items) # with open(target, 'a') as fo: # fo.writelines(recs_w) # fo.close() #", "min_temp, prec24, prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt) else: rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\")", "if prec24_cnt == 0: prec24_mon = 999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\", "-60 \\ & HR in [2, 8, 14, 20]\") if len(valid_temperature) == 4:", "and \\ statisics for daily and monthly.\") self._version = \"surf4hourstool.py 0.0.1\" def defineArgumentParser(self,", "# def batchConvert(self, srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot) # filelist = sorted(os.listdir(srcPathRoot)) # for", "\"0808\": recs = self.queryData(db, curDay, 16) elif stat_win == \"2020\": recs = self.queryData(db,", "in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath, targetPath)", "!/usr/bin/python # -*- coding: utf-8 -*- # COPYRIGHT 2016 igsnrr # # MORE", "min_temp, prec_mon, prec_cnt, prec24_mon, prec24_cnt) return rec def statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist", "statistics precipation valid_prec = hours24.query(\"200 > PREC >= 0\") prec24 = valid_prec[\"PREC\"].sum() prec24_cnt", "self.insertHeader(targetPathRoot) # def convert(self, srcPath, targetRoot): # if not os.path.exists(srcPath): # self._loggej.info(\"Failed: {0}", "# target = 
os.path.join(targetRoot, k) # recs_w = [ # strfmt.format(k, year, mon,", "# filename = os.path.basename(srcPath) # year = int(filename[:4]) # mon = int(filename[4:6]) #", "parentDir): # header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\", \"YEAR\", # \"MON\",", "fo.writelines(recs[index: index+24]) # index = index + 24 # if index > last_rec:", "# recs = fo.readlines() # sample = recs[0].split() # sid = sample[0] #", "result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\",", "datetime import date from datetime import timedelta, datetime import pandas as pd from", "= sorted(os.listdir(parentDir)) # print(filelist) # for item in filelist: # with open(os.path.join(parentDir, item),", "recs = db.query(cond) return recs def calcDaily(self, sid, dt, hours24, stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf", "# \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec in recs: # items = rec.split(\",\")", "def defineArgumentParser(self, parser): # parser.add_argument(\"source\", action=\"store\", # help=\"root dir for source files\") parser.add_argument(\"target\",", "\"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec in recs: # items = rec.split(\",\") # if items[0]", "surf files organized \\ by day into files organized by station. 
and \\", "os.path.exists(targetRoot) and len(os.listdir(targetRoot)) > 0: print(\"\\nThe dir of {0} is not empty and", "== \"999990\": # items[7] == \"0\" # rec = strfmt.format(items[0], int(items[1]), int(items[2]), #", "\"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\",", "recs = self.queryData(db, curDay, 16) elif stat_win == \"2020\": recs = self.queryData(db, curDay,", "999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss pressure at\" # \"[02, 08,", "'w') as fo: fo.write(header) fo.writelines(result) fo.close() def queryData(self, db, dt, df_hours=4): whf =", "in range(24)] # fo.writelines(recs_emt # today = today + timedelta(days=1) # fo.flush() #", "\"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\")", "# day = today.day # recs_empty = [ # strfmt.format( # sid, year,", "loop y_series = db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() + 1 #", "targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\") result = [] # todo: do", "self.calcDaily(sid, curDay, recs, stat_win) if day_rec is not None: result.append(day_rec) curDay = curDay", "for 24 hours avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else:", "= Surf4HoursTool() import argparse from ..base.logger import Logger parser = argparse.ArgumentParser(prog=\"python -m surf4hourstool\",", "= db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() + 1 # year_begin =", "== 0: prec_mon = 999999 valid_prec = recs.query(\"500 > PREC24 >= 0 &", "\\ Station {0} has more than 24 records on\").format( sid, dt.year, dt.month, dt.day))", "\"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", 
\"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC20_08\",", "sid, year, recs): if len(recs) > 0: # statistics pressure valid_pressure = recs.query(\"1200", "max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999 max_temp = 999999", "# \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec in recs: # items = rec.split(\",\") # if", "for item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item,", "fo.write(header) fo.writelines(result) fo.close() def calcMonthly(self, sid, year, mon, recs): if len(recs) > 0:", "= recs.query(\"5000 > PREC_MON >= 0\") prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec) if", "\"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\") with", "mon = int(sample[3]) # day = int(sample[4]) # nextday = date(year, mon, day)", "parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args = parser.parse_args() print(args) logger = Logger(\"./log/d2s.log\") tool.attachLogger(logger)", "<filename>surf/surf4hourstool.py # !/usr/bin/python # -*- coding: utf-8 -*- # COPYRIGHT 2016 igsnrr #", "0: prec_mon = 999999 valid_prec = recs.query(\"500 > PREC24 >= 0 & CNT", "# print(\"An exception occurred\", line, items) # with open(target, 'a') as fo: #", "day = int(filename[6:8]) # recs = [] # with open(srcPath) as f: #", "8, 14, 20]\") # print(valid_temperature) if len(valid_temperature) == 24: # ok for 24", "y_series.min() year_end = y_series.max() + 1 for year in range(year_begin, year_end): for mon", "min_temp, prec_year, prec_cnt, prec24_year, prec24_cnt) return rec def clearDirectory(self, targetRoot): if os.path.exists(targetRoot) and", "\"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\", \"CNT\", 
\"PREC24_Y\", \"CNT24\") with open(targetPath, 'w') as fo:", "0\") prec24 = valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24 =", "# day = int(filename[6:8]) # recs = [] # with open(srcPath) as f:", "empty and will been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1) if not os.path.exists(targetRoot): os.makedirs(targetRoot)", "= valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: valid_pressure = hours24.query((\"1200> PRES > 600 \\", "os.path.join(srcPathRoot, item) # print(srcPath) # self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot) # def convert(self, srcPath,", "sid, dt.year, dt.month, dt.day)) else: # statistics pressure # valid_pressure = hours24.query(\"1200 >", "stat_win == \"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month,", "targetPath, stat_win) def stasticsDailySingleStatation(self, sid, srcPath, targetPath, stat_win): print(\"processing {0}\".format(srcPath)) db = pd.read_table(srcPath,", "as fo: # recs = fo.readlines() # sample = recs[0].split() # sid =", "prec24_mon, prec24_cnt) return rec def statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for", "'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcYear(self, sid, year, recs): if len(recs)", "24: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres =", "args = parser.parse_args() print(args) logger = Logger(\"./log/d2s.log\") tool.attachLogger(logger) targetRoot = args.target tool.run(args) else:", "# if items[7] == \"999990\": # items[7] == \"0\" # rec = strfmt.format(items[0],", "filelist: srcPath = os.path.join(srcPathRoot, 
item) targetPath = os.path.join(targetPathRoot, item) self.stasticsYearSingleStatation(item, srcPath, targetPath) def", "= \"{0} < DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond) return recs def", "int(sample[2]) # mon = int(sample[3]) # day = int(sample[4]) # today = date(year,", "monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir,", "prec12_pm_cnt == 0: prec12_pm = 999999 if stat_win == \"0808\" or stat_win ==", "prec24_cnt == 0: prec24 = 999999 am_prec = valid_prec.query(\"HR <=8 | HR>20\") pm_prec", "'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcMonthly(self, sid, year, mon, recs): if", "# for item in filelist: # with open(os.path.join(parentDir, item), 'r+') as fo: #", "# mon = int(sample[3]) # day = int(sample[4]) # nextday = date(year, mon,", "\"MIN_TEMP\", \"PREC_Y\", \"CNT\", \"PREC24_Y\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close()", "\"MIN_TEMP\", \"PREC_MON\", \"CNT\", \"PREC24_MON\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close()", "srcPath, targetPath, stat_win) def stasticsDailySingleStatation(self, sid, srcPath, targetPath, stat_win): print(\"processing {0}\".format(srcPath)) db =", "prec_year = 999999 valid_prec = recs.query(\"5000 > PREC24_MON >= 0\") prec24_year = valid_prec[\"PREC24_MON\"].sum()", "= len(pm_prec) if prec12_pm_cnt == 0: prec12_pm = 999999 if stat_win == \"0808\"", "filelist = os.listdir(srcPathRoot) for item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath =", "dir for all data\") def run(self, args): # srcRoot = args.source targetRoot =", "= today.year # mon = today.month # day = today.day # recs_empty =", "delim_whitespace=True, index_col=\"DATE\") result = [] # todo: do config the range of loop", "# 
float(items[5]), float(items[6]), # float(items[7])) # group[items[0]].append(rec) # for k, v in group.items():", "subdir, \"monthly2020\") yearDir = os.path.join(targetRoot, subdir, \"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir,", "= hours24.query(\"60> TEMP > -60 \\ & HR in [2, 8, 14, 20]\")", "miss pressure.\") # .format(sid, year, mon)) # statistics temperature valid_temperature = recs.query(\"60 >", "def stasticsYearSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result = []", "recs.query(\"60 > AVG_TEMP > -60\") # print(valid_temperature) if len(valid_temperature) >= 24: avg_temp =", "miss temperature\") # .format(sid, year, mon,)) # statistics precipation valid_prec = recs.query(\"500 >", "self.calcYear(sid, year, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\",", "== 0: prec24 = 999999 am_prec = valid_prec.query(\"HR <=8 | HR>20\") pm_prec =", "is designed to convert surf files orgarnized by month into files by station", "else: avg_pres = 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}, \"", "max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: valid_pressure = hours24.query((\"1200> PRES > 600", "args.target # print(srcRoot, \"-->\", targetRoot) bystationDir = os.path.join(targetRoot, \"bystation\") # self.batchConvert(srcRoot, bystationDir) #", "999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0}", "\"{:>10}{:>10}{:>10}{:>4}{:>12}{:>6}\\n\").format( \"SID\", \"YEAR\", \"MON\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\", \"PREC24_MON\",", "def calcDaily(self, sid, dt, hours24, 
stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24) > 24):", "__init__(self): ToolBase.__init__(self, \"Surf4HoursTool\", \"The Surf4Hours Tool convert surf files organized \\ by day", "AVG_TEMP > -60\") if len(valid_temperature) >= 10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max()", "INFO ... # email: import os import shutil import time from datetime import", "rec = strfmt.format(items[0], int(items[1]), int(items[2]), # int(items[3]), int(items[4]), # float(items[5]), float(items[6]), # float(items[7]))", "Station {0} miss pressure.\") # .format(sid, year)) # statistics temperature valid_temperature = recs.query(\"60", "prec_cnt = len(valid_prec) if prec_cnt == 0: prec_mon = 999999 valid_prec = recs.query(\"500", "min_pres, avg_temp, max_temp, min_temp, prec24, prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt) else: rec =", "TEMP > -60 \\ & HR in [2, 8, 14, 20]\") # print(valid_temperature)", "open(target, 'a') as fo: # fo.writelines(recs_w) # fo.close() # def insertHeader(self, parentDir): #", "= sorted(os.listdir(srcPathRoot)) # for item in filelist: # srcPath = os.path.join(srcPathRoot, item) #", "# \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\", \"YEAR\", # \"MON\", \"DAY\", \"HR\", # \"PRES\", \"TEMP\",", "= 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss temperature\")", "\"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year = today.year # mon = today.month # day", "{0} miss temperature\") # .format(sid, year, mon,)) # statistics precipation valid_prec = recs.query(\"500", "\"[02, 08, 14, 20]\") # .format(sid, dt.year, # dt.month, dt.day)) # statistics precipation", "prec12_pm = 999999 if stat_win == \"0808\" or stat_win == \"0832\": rec =", "HR>20\") pm_prec = valid_prec.query(\"8 < HR <= 20\") prec12_am = am_prec[\"PREC\"].sum() prec12_am_cnt =", 
"group[items[0]] = [] # if items[7] == \"999990\": # items[7] == \"0\" #", "os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\")", "PRES > 600 \\ & HR in [2, 8, 14, 20]\")) if len(valid_pressure)", "miss pressure at\" # \"[02, 08, 14, 20]\") # .format(sid, dt.year, # dt.month,", "result = [] # todo: do config the range of loop y_series =", "valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp =", "valid_pressure = hours24.query((\"1200> PRES > 600 \\ & HR in [2, 8, 14,", "= (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp,", "== \"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day,", "HR in [2, 8, 14, 20]\") # print(valid_temperature) if len(valid_temperature) == 24: #", "# .format(sid, dt.year, # dt.month, dt.day)) # statistics precipation valid_prec = hours24.query(\"200 >", "# int(items[3]), int(items[4]), # float(items[5]), float(items[6]), # float(items[7])) # group[items[0]].append(rec) # for k,", "\"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_MON\", \"CNT\", \"PREC24_MON\", \"CNT24\") with open(targetPath, 'w') as fo:", "if stat_win == \"0808\" or stat_win == \"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\")", "\"0\" # rec = strfmt.format(items[0], int(items[1]), int(items[2]), # int(items[3]), int(items[4]), # float(items[5]), float(items[6]),", "# \"[02, 08, 14, 20]\") # .format(sid, dt.year, # dt.month, 
dt.day)) # statistics", "999999, 999999) # for i in range(24)] # fo.writelines(recs_emt # today = today", "data\") def run(self, args): # srcRoot = args.source targetRoot = args.target # print(srcRoot,", "items[0] not in group: # group[items[0]] = [] # if items[7] == \"999990\":", "else: avg_pres = 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}, Station", "999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss temperature\") #", "for item in filelist: # with open(os.path.join(parentDir, item), 'r+') as fo: # recs", "min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres = 999999", "# strfmt = ( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year =", ">= 0 & CNT == 24\") prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec) if", "valid_prec.query(\"8 < HR <= 20\") prec12_am = am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec) if prec12_am_cnt", "yearDir = os.path.join(targetRoot, subdir, \"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir, \"2020\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir,", "> PRES > 600\") # temporary change valid_pressure = hours24.query((\"1200> PRES > 600", "= os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly2020\") yearDir = os.path.join(targetRoot, subdir,", "yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir,", "(\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\", \"YEAR\", # \"MON\", \"DAY\", \"HR\", # \"PRES\",", "= hours24.query(\"1200 > PRES > 600\") # temporary change valid_pressure = hours24.query((\"1200> 
PRES", "# \"PRES\", \"TEMP\", \"PREC\") # filelist = sorted(os.listdir(parentDir)) # print(filelist) # for item", "min_pres, avg_temp, max_temp, min_temp, prec_year, prec_cnt, prec24_year, prec24_cnt) return rec def clearDirectory(self, targetRoot):", "== 0: prec24_mon = 999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year,", "recs.empty: day_rec = self.calcDaily(sid, curDay, recs, stat_win) if day_rec is not None: result.append(day_rec)", "len(valid_prec) if prec24_cnt == 0: prec24 = 999999 am_prec = valid_prec.query(\"HR <=8 |", "min_temp = 999999 # self._logger.error((\"{1}, Station {0} miss temperature\") # .format(sid, year)) #", "if prec24_cnt == 0: prec24 = 999999 am_prec = valid_prec.query(\"HR <=8 | HR>20\")", "pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec) if prec12_pm_cnt == 0: prec12_pm = 999999 if stat_win", "dailyDir = os.path.join(targetRoot, subdir, \"daily2020\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly2020\") yearDir = os.path.join(targetRoot,", "== \"__main__\": # testing code # import sys # print(sys.argv) tool = Surf4HoursTool()", "and len(os.listdir(targetRoot)) > 0: print(\"\\nThe dir of {0} is not empty and will", "fo: fo.write(header) fo.writelines(result) fo.close() def queryData(self, db, dt, df_hours=4): whf = datetime(dt.year, dt.month,", "mon in range(1, 13): cond = \"YEAR == {0} & MON == {1}\".format(year,", "# email: import os import shutil import time from datetime import date from", "stat_win == \"0808\" or stat_win == \"0832\": rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\", "valid_temperature[\"TEMP\"].min() else: valid_temperature = hours24.query(\"60> TEMP > -60 \\ & HR in [2,", "targetPathRoot) # self.insertHeader(targetPathRoot) # def convert(self, srcPath, targetRoot): # if not 
os.path.exists(srcPath): #", "# # try: # recs_w[int(items[5])] = line # # except: # # print(\"An", "20]\") # .format(sid, dt.year, # dt.month, dt.day)) # statistics precipation valid_prec = hours24.query(\"200", "yearDir) # def batchConvert(self, srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot) # filelist = sorted(os.listdir(srcPathRoot)) #", "config the range of loop y_series = db[\"YEAR\"] endDay = date(y_series.max()+1, 1, 1)", "{0} is not empty and will been overrided.\" .format(targetRoot)) shutil.rmtree(targetRoot, True) time.sleep(1) if", "..base.toolbase import ToolBase class Surf4HoursTool(ToolBase): \"\"\"The tool is designed to convert surf files", "prec24, prec24_cnt, prec12_pm, prec12_pm_cnt, prec12_am, prec12_am_cnt) else: rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\", "0, 0) \\ - timedelta(hours=df_hours) wht = whf + timedelta(hours=24) cond = \"{0}", "= argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser)", "999999 prec12_pm = pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec) if prec12_pm_cnt == 0: prec12_pm =", "= date(year, mon, day) # else: # strfmt = ( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" #", "prec12_pm = pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec) if prec12_pm_cnt == 0: prec12_pm = 999999", "== 0: prec_year = 999999 valid_prec = recs.query(\"5000 > PREC24_MON >= 0\") prec24_year", "# help=\"root dir for source files\") parser.add_argument(\"target\", action=\"store\", help=\"root dir for all data\")", "calcYear(self, sid, year, recs): if len(recs) > 0: # statistics pressure valid_pressure =", "= len(valid_prec) if prec_cnt == 0: prec_year = 999999 valid_prec = recs.query(\"5000 >", "filename = 
os.path.basename(srcPath) # year = int(filename[:4]) # mon = int(filename[4:6]) # day", "# for k, v in group.items(): # target = os.path.join(targetRoot, k) # recs_w", "prec_cnt, prec24_mon, prec24_cnt) return rec def statisticsYears(self, srcPathRoot, targetPathRoot): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot)", "# if index > last_rec: # break # sample = recs[index].split() # sid", "Surf4Hours Tool convert surf files organized \\ by day into files organized by", "\"YEAR == {0} & MON == {1}\".format(year, mon) recs = db.query(cond) if not", "999999, 999999, 999999) # for i in range(24)] # fo.writelines(recs_emt # today =", "= valid_prec.query(\"8 < HR <= 20\") prec12_am = am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec) if", "calcDaily(self, sid, dt, hours24, stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24) > 24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d},", "filelist: # with open(os.path.join(parentDir, item), 'r+') as fo: # recs = fo.readlines() #", "> 800\") if len(valid_pressure) >= 10: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres", "# 08-08, shuili subdir = \"sl0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir =", "group[items[0]].append(rec) # for k, v in group.items(): # target = os.path.join(targetRoot, k) #", "\"{0} < DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond) return recs def calcDaily(self,", "group.items(): # target = os.path.join(targetRoot, k) # recs_w = [ # strfmt.format(k, year,", "if len(valid_pressure) == 4: avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min()", "TEMP > -60 \\ & HR in [2, 8, 14, 20]\") if len(valid_temperature)", "avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = 
valid_temperature[\"TEMP\"].min() else: avg_temp = 999999", "\" # \"Station {0} miss temperature\") # .format(sid, year, mon,)) # statistics precipation", ">= 10: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres", "items) # with open(target, 'a') as fo: # fo.writelines(recs_w) # fo.close() # def", "today # fo.seek(0) # fo.write(header) # index = 0 # last_rec = len(recs)", "# -*- coding: utf-8 -*- # COPYRIGHT 2016 igsnrr # # MORE INFO", "valid_prec = recs.query(\"5000 > PREC24_MON >= 0\") prec24_year = valid_prec[\"PREC24_MON\"].sum() prec24_cnt = len(valid_prec)", "datetime import pandas as pd from ..base.toolbase import ToolBase class Surf4HoursTool(ToolBase): \"\"\"The tool", "whf + timedelta(hours=24) cond = \"{0} < DATETIME <= {1}\".format(whf.strftime(\"%Y%m%d%H\"), wht.strftime(\"%Y%m%d%H\")) recs =", "= 999999 max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}, Station {0} miss", "ToolBase.__init__(self, \"Surf4HoursTool\", \"The Surf4Hours Tool convert surf files organized \\ by day into", "# self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss pressure at\" # \"[02, 08, 14,", "# testing code # import sys # print(sys.argv) tool = Surf4HoursTool() import argparse", "year_end = y_series.max() + 1 # year_begin = 2015 # year_end = 2016", "= {} # strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # for rec", "todo: do config the range of loop y_series = db[\"YEAR\"] endDay = date(y_series.max()+1,", "subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics", "999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0}", "prec12_pm_cnt, prec12_am, prec12_am_cnt) else: rec = 
(\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year,", "if len(valid_temperature) >= 10: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min()", "= valid_temperature[\"TEMP\"].min() else: valid_temperature = hours24.query(\"60> TEMP > -60 \\ & HR in", "4: avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: avg_temp =", "subdir, \"daily2020\") monthlyDir = os.path.join(targetRoot, subdir, \"monthly2020\") yearDir = os.path.join(targetRoot, subdir, \"year2020\") print(\"statistics", "= [] # with open(srcPath) as f: # recs = f.readlines() # recs", "len(valid_pressure) == 4: avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else:", "strfmt = ( # \"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year = today.year", "avg_temp, max_temp, min_temp, prec_year, prec_cnt, prec24_year, prec24_cnt) return rec def clearDirectory(self, targetRoot): if", "= pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result = [] # todo: do config the range", "description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s 0.0.1\") tool.defineArgumentParser(parser) args = parser.parse_args() print(args)", "\"{:>4d}{:>10.1f}{:>6d}\\n\") \\ .format(sid, year, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_year, prec_cnt, prec24_year,", "- timedelta(hours=df_hours) wht = whf + timedelta(hours=24) cond = \"{0} < DATETIME <=", "valid_temperature[\"TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp = 999999 # 
self._logger.error((\"{1}-{2:0>2d}-{3:0>2d},", "k, v in group.items(): # target = os.path.join(targetRoot, k) # recs_w = [", "srcPath, targetRoot): # if not os.path.exists(srcPath): # self._loggej.info(\"Failed: {0} does't existe\".format(srcPath)) # filename", "\"bystation\") # self.batchConvert(srcRoot, bystationDir) # 08-08, qixiang subdir = \"qx0808\" dailyDir = os.path.join(targetRoot,", "recs: # items = rec.split(\",\") # if items[0] not in group: # group[items[0]]", "min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss temperature\") # .format(sid,", "dt.year, dt.month, dt.day)) else: # statistics pressure # valid_pressure = hours24.query(\"1200 > PRES", "\"\"\" if (len(hours24) > 24): self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \\ Station {0} has more than 24", "999999) # for i in range(24)] # for line in v: # items", "\"PREC24_MON\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcMonthly(self, sid,", "# self.clearDirectory(targetPathRoot) # filelist = sorted(os.listdir(srcPathRoot)) # for item in filelist: # srcPath", "int(sample[2]) # mon = int(sample[3]) # day = int(sample[4]) # nextday = date(year,", "2016 for year in range(year_begin, year_end): cond = \"YEAR == {0}\".format(year) recs =", "sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result = [] # todo:", "# self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss temperature at\" # \"[02, 08, 14,", "\"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\") with open(targetPath,", "line.split() # # try: # recs_w[int(items[5])] = line # # except: # #", ">= 24: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres", "True) time.sleep(1) if not os.path.exists(targetRoot): 
os.makedirs(targetRoot) if __name__ == \"__main__\": # testing code", "\"PREC24\", \"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\") else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\",", "4: avg_pres = valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: avg_pres =", "= curDay + timedelta(days=1) if stat_win == \"0808\" or stat_win == \"0832\": header", "recs_w[int(items[5])] = line # # except: # # print(\"An exception occurred\", line, items)", "= valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24 = 999999 am_prec", "= (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\", \"DATETIME\", \"YEAR\", # \"MON\", \"DAY\", \"HR\", #", "'r+') as fo: # recs = fo.readlines() # sample = recs[0].split() # sid", "else: valid_temperature = hours24.query(\"60> TEMP > -60 \\ & HR in [2, 8,", "import Logger parser = argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\",", "# srcRoot = args.source targetRoot = args.target # print(srcRoot, \"-->\", targetRoot) bystationDir =", "# group = {} # strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") #", "Surf4HoursTool() import argparse from ..base.logger import Logger parser = argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool", "= os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir)", "subdir = \"sl0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir = os.path.join(targetRoot, subdir, 
\"monthly0808\")", "monthlyDir) self.statisticsYears(monthlyDir, yearDir) # def batchConvert(self, srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot) # filelist =", "if __name__ == \"__main__\": # testing code # import sys # print(sys.argv) tool", "i in range(24)] # fo.writelines(recs_emt # today = today + timedelta(days=1) # fo.flush()", "statisics for daily and monthly.\"\"\" def __init__(self): ToolBase.__init__(self, \"Surf4HoursTool\", \"The Surf4Hours Tool convert", "import os import shutil import time from datetime import date from datetime import", "20]\") # print(valid_temperature) if len(valid_temperature) == 24: # ok for 24 hours avg_temp", "= valid_pressure[\"MAX_PRES\"].max() min_pres = valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres", "== \"0\" # rec = strfmt.format(items[0], int(items[1]), int(items[2]), # int(items[3]), int(items[4]), # float(items[5]),", "recs.query(\"5000 > PREC_MON >= 0\") prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec) if prec_cnt", "[2, 8, 14, 20]\")) if len(valid_pressure) == 4: avg_pres = valid_pressure[\"PRES\"].mean() max_pres =", "fo.close() # def insertHeader(self, parentDir): # header = (\"{0:>8}{1:>12}{2:>6}{3:>4}{4:>4}{5:>4}\" # \"{6:>12}{7:>12}{8:>12}\\n\").format( # \"SID\",", "& CNT == 24\") prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt ==", "date(y_series.min(), 1, 1) while(curDay < endDay): if stat_win == \"0808\": recs = self.queryData(db,", "rec = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres, max_pres,", "from ..base.logger import Logger parser = argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\")", "hours24.query(\"60> TEMP > -60 \\ & HR in [2, 8, 14, 
20]\") #", "mon,)) # statistics precipation valid_prec = recs.query(\"500 > PREC24 >= 0\") prec_mon =", "item) self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win) def stasticsDailySingleStatation(self, sid, srcPath, targetPath, stat_win): print(\"processing {0}\".format(srcPath))", "by day into files organized by station. and \\ statisics for daily and", "HR <= 20\") prec12_am = am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec) if prec12_am_cnt == 0:", "if prec24_year == 0: prec24_year = 999999 rec = (\"{:>8}{:>6}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>4d}{:>10.1f}{:>6d}\\n\") \\", "[ # strfmt.format(k, year, mon, day, i, # 999999, 999999, 999999) # for", "\"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC20_08\", \"C1\", \"PREC08_20\", \"C2\") with open(targetPath, 'w')", "and statisics for daily and monthly.\"\"\" def __init__(self): ToolBase.__init__(self, \"Surf4HoursTool\", \"The Surf4Hours Tool", "db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\") result = [] # todo: do config", "max_pres, min_pres, avg_temp, max_temp, min_temp, prec_year, prec_cnt, prec24_year, prec24_cnt) return rec def clearDirectory(self,", "self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 08-08, shuili subdir = \"sl0808\" dailyDir = os.path.join(targetRoot,", "dt.day)) # statistics precipation valid_prec = hours24.query(\"200 > PREC >= 0\") prec24 =", "# print(valid_pressure) if len(valid_pressure) == 24: # ok for 24 hours avg_pres =", "# 08-08, qixiang subdir = \"qx0808\" dailyDir = os.path.join(targetRoot, subdir, \"daily0808\") monthlyDir =", "min_pres = valid_pressure[\"PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres = 999999", "PREC >= 0\") prec24 = valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0:", "sample = recs[index].split() # sid = sample[0] # year = 
int(sample[2]) # mon", "# \"Station {0} miss pressure.\") # .format(sid, year, mon)) # statistics temperature valid_temperature", "min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss pressure at\" #", "999999 valid_prec = recs.query(\"500 > PREC24 >= 0 & CNT == 24\") prec24_mon", "statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") if len(valid_temperature) >= 10:", "prec12_am = 999999 prec12_pm = pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec) if prec12_pm_cnt == 0:", "valid_temperature[\"AVG_TEMP\"].mean() max_temp = valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999 max_temp =", "avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_mon, prec_cnt, prec24_mon, prec24_cnt) return rec def", "& HR in [2, 8, 14, 20]\")) # print(valid_pressure) if len(valid_pressure) == 24:", "monthlyDir) self.statisticsYears(monthlyDir, yearDir) # 20-20, qixiang subdir = \"qx2020\" dailyDir = os.path.join(targetRoot, subdir,", "\"C2\") else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\",", "import time from datetime import date from datetime import timedelta, datetime import pandas", "year in range(year_begin, year_end): cond = \"YEAR == {0}\".format(year) recs = db.query(cond) if", "import timedelta, datetime import pandas as pd from ..base.toolbase import ToolBase class Surf4HoursTool(ToolBase):", "in [2, 8, 14, 20]\") # print(valid_temperature) if len(valid_temperature) == 24: # ok", "range of loop y_series = db[\"YEAR\"] year_begin = y_series.min() year_end = y_series.max() +", "\"YEAR\", \"MON\", \"DAY\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC20_08\", \"C1\",", "# \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # 
\"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year = today.year # mon = today.month #", "y_series.max() + 1 for year in range(year_begin, year_end): for mon in range(1, 13):", "max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: avg_pres = 999999 max_pres = 999999", "= strfmt.format(items[0], int(items[1]), int(items[2]), # int(items[3]), int(items[4]), # float(items[5]), float(items[6]), # float(items[7])) #", "import sys # print(sys.argv) tool = Surf4HoursTool() import argparse from ..base.logger import Logger", "PREC24 >= 0\") prec_mon = valid_prec[\"PREC24\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0:", "for 24 hours avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else:", "valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: avg_pres = 999999 max_pres =", "year, mon, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec_mon, prec_cnt, prec24_mon, prec24_cnt) return", "800\") if len(valid_pressure) >= 10: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres =", "\"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\") else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\")", "prec24 = valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt == 0: prec24 = 999999", "max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}, \" # \"Station {0} miss", "\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\" # \"{5:>12.1f}{6:>12.1f}{7:>12.1f}\\n\") # year = today.year # mon = today.month", "# for line in v: # items = line.split() # # try: #", "{0} miss temperature\") # .format(sid, year)) # statistics precipation valid_prec = recs.query(\"5000 >", "in 
range(year_begin, year_end): cond = \"YEAR == {0}\".format(year) recs = db.query(cond) if not", "if prec_cnt == 0: prec_year = 999999 valid_prec = recs.query(\"5000 > PREC24_MON >=", "PREC_MON >= 0\") prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec) if prec_cnt == 0:", "\\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp, max_temp, min_temp, prec24,", "day into files organized by station. and \\ statisics for daily and monthly.\")", "prec24_cnt) return rec def clearDirectory(self, targetRoot): if os.path.exists(targetRoot) and len(os.listdir(targetRoot)) > 0: print(\"\\nThe", "= valid_temperature[\"MAX_TEMP\"].max() min_temp = valid_temperature[\"MIN_TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp", "= y_series.max() + 1 for year in range(year_begin, year_end): for mon in range(1,", "os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath, targetPath) def stasticsMonthSingleStatation(self, sid, srcPath,", "prec24_cnt == 0: prec24_mon = 999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid,", "stat_win == \"0808\" or stat_win == \"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\",", "os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\")", "\"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir, \"0808\") self.statisticsMonthly(dailyDir, monthlyDir)", "\"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", 
\"MIN_TEMP\", \"PREC_Y\", \"CNT\", \"PREC24_Y\", \"CNT24\")", "min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss temperature at\" #", "= int(sample[3]) # day = int(sample[4]) # today = date(year, mon, day) #", "\"__main__\": # testing code # import sys # print(sys.argv) tool = Surf4HoursTool() import", "recs.empty: mon_rec = self.calcYear(sid, year, recs) result.append(mon_rec) header = (\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\",", "24 hours avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: valid_temperature", "14, 20]\") # print(valid_temperature) if len(valid_temperature) == 24: # ok for 24 hours", "for mon in range(1, 13): cond = \"YEAR == {0} & MON ==", "# rec = strfmt.format(items[0], int(items[1]), int(items[2]), # int(items[3]), int(items[4]), # float(items[5]), float(items[6]), #", "= os.path.join(targetRoot, subdir, \"monthly0808\") yearDir = os.path.join(targetRoot, subdir, \"year0808\") print(\"statistics qx0808\") self.statisticsDaily(bystationDir, dailyDir,", "# srcPath = os.path.join(srcPathRoot, item) # print(srcPath) # self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot) #", "> 600\") # temporary change valid_pressure = hours24.query((\"1200> PRES > 600 \\ &", "print(srcPath) # self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot) # def convert(self, srcPath, targetRoot): # if", "valid_pressure[\"MIN_PRES\"].min() else: avg_pres = 999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d},", "self.convert(srcPath, targetPathRoot) # self.insertHeader(targetPathRoot) # def convert(self, srcPath, targetRoot): # if not os.path.exists(srcPath):", "\"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, 
avg_pres, max_pres, min_pres, avg_temp, max_temp,", "stat_win) if day_rec is not None: result.append(day_rec) curDay = curDay + timedelta(days=1) if", "1 for year in range(year_begin, year_end): for mon in range(1, 13): cond =", "= sample[0] # year = int(sample[2]) # mon = int(sample[3]) # day =", "target = os.path.join(targetRoot, k) # recs_w = [ # strfmt.format(k, year, mon, day,", "= line.split() # # try: # recs_w[int(items[5])] = line # # except: #", "min_pres, avg_temp, max_temp, min_temp, prec_mon, prec_cnt, prec24_mon, prec24_cnt) return rec def statisticsYears(self, srcPathRoot,", "organized \\ by day into files organized by station. and \\ statisics for", "fo.close() def statisticsDaily(self, srcPathRoot, targetPathRoot, stat_win): self.clearDirectory(targetPathRoot) filelist = os.listdir(srcPathRoot) for item in", "[2, 8, 14, 20]\") if len(valid_temperature) == 4: avg_temp = valid_temperature[\"TEMP\"].mean() max_temp =", "Logger parser = argparse.ArgumentParser(prog=\"python -m surf4hourstool\", description=\"Surf4HoursTool Usage Guide\", prefix_chars=\"-+\") parser.add_argument(\"--version\", action=\"version\", version=\"%(prog)s", "14, 20]\") # .format(sid, dt.year, # dt.month, dt.day)) # statistics temperature # valid_temperature", "999999 # self._logger.error((\"{1}, Station {0} miss temperature\") # .format(sid, year)) # statistics precipation", "\"C1\", \"PREC08_20\", \"C2\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def queryData(self,", "statistics precipation valid_prec = recs.query(\"5000 > PREC_MON >= 0\") prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt", "surf files orgarnized by month into files by station IDs and statisics for", "len(valid_prec) if prec24_cnt == 0: prec24_mon = 999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\")", "# statistics pressure # valid_pressure = hours24.query(\"1200 > PRES > 600\") # 
temporary", "pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\") result = [] # todo: do config the range", "# index = index + 24 # if index > last_rec: # break", "wht.strftime(\"%Y%m%d%H\")) recs = db.query(cond) return recs def calcDaily(self, sid, dt, hours24, stat_win): \"\"\"", "= recs.query(\"500 > PREC24 >= 0 & CNT == 24\") prec24_mon = valid_prec[\"PREC24\"].sum()", "= fo.readlines() # sample = recs[0].split() # sid = sample[0] # year =", "designed to convert surf files orgarnized by month into files by station IDs", "max_temp = 999999 min_temp = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0} miss", "0.0.1\" def defineArgumentParser(self, parser): # parser.add_argument(\"source\", action=\"store\", # help=\"root dir for source files\")", "rec in recs: # items = rec.split(\",\") # if items[0] not in group:", "999999 rec = (\"{:>8}{:>6}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}{:>10.1f}{:>4d}\" \"{:>12.1f}{:>6d}\\n\") \\ .format(sid, year, mon, avg_pres, max_pres, min_pres,", "date(y_series.max()+1, 1, 1) curDay = date(y_series.min(), 1, 1) while(curDay < endDay): if stat_win", "\"year0808\") print(\"statistics sl0808\") self.statisticsDaily(bystationDir, dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # def batchConvert(self,", ".format(sid, dt.year, # dt.month, dt.day)) # statistics precipation valid_prec = hours24.query(\"200 > PREC", "# if nextday == today: # fo.writelines(recs[index: index+24]) # index = index +", "= valid_pressure[\"PRES\"].mean() max_pres = valid_pressure[\"PRES\"].max() min_pres = valid_pressure[\"PRES\"].min() else: avg_pres = 999999 max_pres", "if not recs.empty: day_rec = self.calcDaily(sid, curDay, recs, stat_win) if day_rec is not", "# with open(os.path.join(parentDir, item), 'r+') as fo: # recs = fo.readlines() # sample", "am_prec = valid_prec.query(\"HR <=8 | HR>20\") pm_prec = 
valid_prec.query(\"8 < HR <= 20\")", "AVG_TEMP > -60\") # print(valid_temperature) if len(valid_temperature) >= 24: avg_temp = valid_temperature[\"AVG_TEMP\"].mean() max_temp", ".format(sid, year, mon,)) # statistics precipation valid_prec = recs.query(\"500 > PREC24 >= 0\")", "pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result = [] # todo: do config the range of", "(\"{:>8}{:>6}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>6}\\n\").format( \"SID\", \"YEAR\", \"AVG_PRES\", \"MAX_PRES\", \"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC_Y\", \"CNT\", \"PREC24_Y\",", "# \"Station {0} miss pressure at\" # \"[02, 08, 14, 20]\") # .format(sid,", "(\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>10.1f}{:>10.1f}\" \"{:>10.1f}{:>4d}{:>10.1f}{:>4d}{:>10.1f}{:>4d}\\n\") \\ .format(sid, dt.strftime(\"%Y%m%d\"), dt.year, dt.month, dt.day, avg_pres, max_pres, min_pres, avg_temp,", "self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win) def stasticsDailySingleStatation(self, sid, srcPath, targetPath, stat_win): print(\"processing {0}\".format(srcPath)) db", "= valid_temperature[\"TEMP\"].min() else: avg_temp = 999999 max_temp = 999999 min_temp = 999999 #", "in range(1, 13): cond = \"YEAR == {0} & MON == {1}\".format(year, mon)", "recs[1:] # f.close() # group = {} # strfmt = (\"{0:>8}{1:>6d}{2:0>2d}{3:0>2d}{4:0>2d}\" # \"{1:>6d}{2:>4d}{3:>4d}{4:>4d}\"", "db.query(cond) if not recs.empty: mon_rec = self.calcMonthly(sid, year, mon, recs) result.append(mon_rec) header =", "\"YEAR == {0}\".format(year) recs = db.query(cond) if not recs.empty: mon_rec = self.calcYear(sid, year,", "rec def clearDirectory(self, targetRoot): if os.path.exists(targetRoot) and len(os.listdir(targetRoot)) > 0: print(\"\\nThe dir of", "print(valid_pressure) if len(valid_pressure) >= 24: avg_pres = valid_pressure[\"AVG_PRES\"].mean() max_pres = valid_pressure[\"MAX_PRES\"].max() min_pres =", "20\") prec12_am = 
am_prec[\"PREC\"].sum() prec12_am_cnt = len(am_prec) if prec12_am_cnt == 0: prec12_am =", "# while index < last_rec: # if nextday == today: # fo.writelines(recs[index: index+24])", "item in filelist: srcPath = os.path.join(srcPathRoot, item) targetPath = os.path.join(targetPathRoot, item) self.stasticsMonthSingleStatation(item, srcPath,", "dt.year, # dt.month, dt.day)) # statistics precipation valid_prec = hours24.query(\"200 > PREC >=", "if prec12_am_cnt == 0: prec12_am = 999999 prec12_pm = pm_prec[\"PREC\"].sum() prec12_pm_cnt = len(pm_prec)", "srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot) # filelist = sorted(os.listdir(srcPathRoot)) # for item in filelist:", "dailyDir, \"0832\") self.statisticsMonthly(dailyDir, monthlyDir) self.statisticsYears(monthlyDir, yearDir) # def batchConvert(self, srcPathRoot, targetPathRoot): # self.clearDirectory(targetPathRoot)", "\"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\") else: header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\", "# day = int(sample[4]) # nextday = date(year, mon, day) # else: #", "hours avg_temp = valid_temperature[\"TEMP\"].mean() max_temp = valid_temperature[\"TEMP\"].max() min_temp = valid_temperature[\"TEMP\"].min() else: valid_temperature =", "self._logger.error((\"{1}, Station {0} miss temperature\") # .format(sid, year)) # statistics precipation valid_prec =", "= recs[0].split() # sid = sample[0] # year = int(sample[2]) # mon =", "{0} & MON == {1}\".format(year, mon) recs = db.query(cond) if not recs.empty: mon_rec", "stat_win == \"0832\": header = (\"{:>8}{:>10}{:>6}{:>4}{:>4}{:>10}{:>10}{:>10}{:>10}\" \"{:>10}{:>10}{:>10}{:>4}{:>10}{:>4}{:>10}{:>4}\\n\") \\ .format(\"SID\", \"DATE\", \"YEAR\", \"MON\", \"DAY\",", "999999 max_pres = 999999 min_pres = 999999 # self._logger.error((\"{1}-{2:0>2d}-{3:0>2d}, \" # \"Station {0}", "... 
# email: import os import shutil import time from datetime import date", "targetPath = os.path.join(targetPathRoot, item) self.stasticsDailySingleStatation(item, srcPath, targetPath, stat_win) def stasticsDailySingleStatation(self, sid, srcPath, targetPath,", "y_series = db[\"YEAR\"] endDay = date(y_series.max()+1, 1, 1) curDay = date(y_series.min(), 1, 1)", "> PREC_MON >= 0\") prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec) if prec_cnt ==", "with open(target, 'a') as fo: # fo.writelines(recs_w) # fo.close() # def insertHeader(self, parentDir):", "temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") # print(valid_temperature) if len(valid_temperature) >=", "# statistics temperature valid_temperature = recs.query(\"60 > AVG_TEMP > -60\") if len(valid_temperature) >=", "from datetime import timedelta, datetime import pandas as pd from ..base.toolbase import ToolBase", "valid_prec = recs.query(\"5000 > PREC_MON >= 0\") prec_year = valid_prec[\"PREC_MON\"].sum() prec_cnt = len(valid_prec)", "statistics pressure valid_pressure = recs.query(\"1200 > AVG_PRES > 800\") # print(valid_pressure) if len(valid_pressure)", "> PREC24 >= 0 & CNT == 24\") prec24_mon = valid_prec[\"PREC24\"].sum() prec24_cnt =", "hours24.query(\"200 > PREC >= 0\") prec24 = valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt", "prec12_pm_cnt = len(pm_prec) if prec12_pm_cnt == 0: prec12_pm = 999999 if stat_win ==", "exception occurred\", line, items) # with open(target, 'a') as fo: # fo.writelines(recs_w) #", "< endDay): if stat_win == \"0808\": recs = self.queryData(db, curDay, 16) elif stat_win", "= os.path.basename(srcPath) # year = int(filename[:4]) # mon = int(filename[4:6]) # day =", "print(sys.argv) tool = Surf4HoursTool() import argparse from ..base.logger import Logger parser = argparse.ArgumentParser(prog=\"python", "range(1, 13): cond = \"YEAR == {0} & MON == {1}\".format(year, mon) recs", "last_rec = len(recs) - 1 # while index < 
last_rec: # if nextday", "13): cond = \"YEAR == {0} & MON == {1}\".format(year, mon) recs =", "= db.query(cond) if not recs.empty: mon_rec = self.calcYear(sid, year, recs) result.append(mon_rec) header =", "y_series.max() + 1 # year_begin = 2015 # year_end = 2016 for year", "= int(filename[:4]) # mon = int(filename[4:6]) # day = int(filename[6:8]) # recs =", "return recs def calcDaily(self, sid, dt, hours24, stat_win): \"\"\" http://www.szmb.gov.cn/quf/2009/08/2017101815192310488.pdf \"\"\" if (len(hours24)", "= os.path.join(targetRoot, subdir, \"monthly2020\") yearDir = os.path.join(targetRoot, subdir, \"year2020\") print(\"statistics qx2020\") self.statisticsDaily(bystationDir, dailyDir,", "\"PREC_MON\", \"CNT\", \"PREC24_MON\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def", "day_rec is not None: result.append(day_rec) curDay = curDay + timedelta(days=1) if stat_win ==", "\"MIN_PRES\", \"AVG_TEMP\", \"MAX_TEMP\", \"MIN_TEMP\", \"PREC24\", \"CNT\", \"PREC08_20\", \"C1\", \"PREC20_08\", \"C2\") else: header =", "recs[index].split() # sid = sample[0] # year = int(sample[2]) # mon = int(sample[3])", "> -60 \\ & HR in [2, 8, 14, 20]\") # print(valid_temperature) if", "600\") # temporary change valid_pressure = hours24.query((\"1200> PRES > 600 \\ & HR", "> PREC >= 0\") prec24 = valid_prec[\"PREC\"].sum() prec24_cnt = len(valid_prec) if prec24_cnt ==", "\"CNT\", \"PREC24_Y\", \"CNT24\") with open(targetPath, 'w') as fo: fo.write(header) fo.writelines(result) fo.close() def calcYear(self,", "while(curDay < endDay): if stat_win == \"0808\": recs = self.queryData(db, curDay, 16) elif", "# group[items[0]].append(rec) # for k, v in group.items(): # target = os.path.join(targetRoot, k)", "range(24)] # fo.writelines(recs_emt # today = today + timedelta(days=1) # fo.flush() # fo.close()", "= pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True, index_col=\"DATE\") result = [] # todo: do config the", "else: 
valid_pressure = hours24.query((\"1200> PRES > 600 \\ & HR in [2, 8,", "def calcMonthly(self, sid, year, mon, recs): if len(recs) > 0: # statistics pressure", "stasticsYearSingleStatation(self, sid, srcPath, targetPath): db = pd.read_table(srcPath, skip_blank_lines=True, delim_whitespace=True) result = [] #" ]
[ "import LinearLocator, FormatStrFormatter from UQpy.surrogates import * from UQpy.distributions import Uniform, JointIndependent #", "set of 2D data. .. math:: f(x) = x_1^2 + x_2^2 **Description:** Dimensions:", "import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from", "LASSO. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree) lasso = LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis,", "regression_method=ridge) pce3.fit(x,y) # %% md # # PCE surrogate is used to predict", "a given set of 2D data. .. math:: f(x) = x_1^2 + x_2^2", "PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) # %% md # # PCE surrogate is used to", "%% md # # Visualize the 2D function. # %% xmin, xmax =", "Ridge regression. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree) ridge = RidgeRegression() pce3 =", "optimization, 2, 1-15. \"\"\" # %% md # # Import necessary libraries. #", "# Import necessary libraries. # %% import numpy as np import matplotlib.pyplot as", "y_test, s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15)", "example, PCE is used to generate a surrogate model for a given set", "for all :math:`i = 1,2`. **Global minimum:** :math:`f(x^*)=0,` at :math:`x^* = (0,0)`. **Reference:**", "# Define the function. # %% def function(x,y): return x**2 + y**2 #", "PCE coefficients using least squares regression. # %% max_degree = 3 polynomial_basis =", "problem: an introduction. Towards global optimization, 2, 1-15. 
\"\"\" # %% md #", "fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5, aspect=7) plt.show() # %%", "of points f = function(X1_, X2_) fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') surf", "fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # # Create an object from", "Create an object from the PCE class. Compute PCE coefficients using least squares", "%% import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D", "= fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6)", "used to generate a surrogate model for a given set of 2D data.", "print('Mean rel. error, LSTSQ:', error) print('Mean rel. error, LASSO:', error2) print('Mean rel. error,", "# %% md # # Visualize the 2D function. # %% xmin, xmax", "from matplotlib import cm from matplotlib.ticker import LinearLocator, FormatStrFormatter from UQpy.surrogates import *", "pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten() # mean relative validation errors error = np.sum(np.abs((y_val -", "errors error = np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3 =", "<NAME>., & <NAME>. (1978). The global optimization problem: an introduction. Towards global optimization,", "grid of points f = function(X1_, X2_) fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d')", "= np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val -", "at :math:`x^* = (0,0)`. **Reference:** <NAME>., & <NAME>. (1978). 
The global optimization problem:", "X2_ = np.meshgrid(X1, X2) # grid of points f = function(X1_, X2_) fig", "PCE surrogate. # %% n_mc = 1000000 x_mc = joint.rvs(n_mc) y_mc = function(x_mc[:,0],", "ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # # Create an object", "%% n_test_samples = 10000 x_test = joint.rvs(n_test_samples) y_test = pce.predict(x_test) # %% md", "LSTSQ:', error) print('Mean rel. error, LASSO:', error2) print('Mean rel. error, Ridge:', error3) #", "plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') surf = ax.plot_surface(X1_, X2_, f, rstride=1, cstride=1, cmap='gnuplot2', linewidth=0,", "used to predict the behavior of the function at new samples. # %%", "# grid of points f = function(X1_, X2_) fig = plt.figure(figsize=(10,6)) ax =", "= np.var(y_mc) print('Moments from least squares regression :', pce.get_moments()) print('Moments from LASSO regression", "error2 = np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean rel. error,", "y_pce3)/y_val))/n_samples print('Mean rel. error, LSTSQ:', error) print('Mean rel. error, LASSO:', error2) print('Mean rel.", "f = function(X1_, X2_) fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') surf = ax.plot_surface(X1_,", "# %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y, s=20, c='r')", "**Reference:** <NAME>., & <NAME>. (1978). The global optimization problem: an introduction. 
Towards global", "ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5, aspect=7) plt.show() # %% md # #", "Sphere function (2 random inputs, scalar output) ====================================================================== In this example, PCE is", "plt.show() # %% md # Error Estimation # ----------------- # Construct a validation", "plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140)", "surrogate model for a given set of 2D data. .. math:: f(x) =", "evaluate the function at the samples. # %% np.random.seed(1) dist_1 = Uniform(loc=-5.12, scale=10.24)", "y_pce2 = pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten() # mean relative validation errors error =", "new samples. # %% n_test_samples = 10000 x_test = joint.rvs(n_test_samples) y_test = pce.predict(x_test)", "= 10000 x_test = joint.rvs(n_test_samples) y_test = pce.predict(x_test) # %% md # #", "import * from UQpy.distributions import Uniform, JointIndependent # %% md # # Define", "PCE predictions y_pce = pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten() # mean", "= fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y, s=20, c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$',", "# Moment Estimation # ----------------- # Returns mean and variance of the PCE", "libraries. # %% import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d", "an object from the PCE class. 
Compute PCE coefficients using least squares regression.", "= joint.rvs(n_samples) y_val = function(x_val[:,0], x_val[:,1]) # PCE predictions y_pce = pce.predict(x_val).flatten() y_pce2", "= pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten() # mean relative validation errors", "X1_, X2_ = np.meshgrid(X1, X2) # grid of points f = function(X1_, X2_)", "error. # %% # validation sample n_samples = 150 x_val = joint.rvs(n_samples) y_val", "function(x[:,0], x[:,1]) # %% md # # Visualize the 2D function. # %%", "50) X1_, X2_ = np.meshgrid(X1, X2) # grid of points f = function(X1_,", "the behavior of the function at new samples. # %% n_test_samples = 10000", "relative validation errors error = np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples", "= np.linspace(ymin, ymax, 50) X1_, X2_ = np.meshgrid(X1, X2) # grid of points", "# Error Estimation # ----------------- # Construct a validation dataset and get the", "# # PCE surrogate is used to predict the behavior of the function", "surrogate is used to predict the behavior of the function at new samples.", "import Axes3D from matplotlib import cm from matplotlib.ticker import LinearLocator, FormatStrFormatter from UQpy.surrogates", "# mean relative validation errors error = np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val", "max_degree) ridge = RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) # %% md #", "TotalDegreeBasis(joint, max_degree) least_squares = LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) # %% md", "n_mc = 1000000 x_mc = joint.rvs(n_mc) y_mc = function(x_mc[:,0], x_mc[:,1]) mean_mc = np.mean(y_mc)", "* from UQpy.distributions import Uniform, JointIndependent # %% md # # Define the", "using LASSO. 
# %% polynomial_basis = TotalDegreeBasis(joint, max_degree) lasso = LassoRegression() pce2 =", "error = np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val", "generate a surrogate model for a given set of 2D data. .. math::", "fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') surf = ax.plot_surface(X1_, X2_, f, rstride=1, cstride=1,", "----------------- # Returns mean and variance of the PCE surrogate. # %% n_mc", "= joint.rvs(n_samples) y = function(x[:,0], x[:,1]) # %% md # # Visualize the", "LASSO:', error2) print('Mean rel. error, Ridge:', error3) # %% md # Moment Estimation", "fontsize=15) plt.show() # %% md # Error Estimation # ----------------- # Construct a", "cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140)", "lasso = LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) # %% md # #", "np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean rel. error, LSTSQ:', error)", "polynomial_basis = TotalDegreeBasis(joint, max_degree) lasso = LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) #", "md # # PCE surrogate is used to predict the behavior of the", "= TotalDegreeBasis(joint, max_degree) ridge = RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) # %%", "Compute PCE coefficients using least squares regression. # %% max_degree = 3 polynomial_basis", "-6,6 ymin, ymax = -6,6 X1 = np.linspace(xmin, xmax, 50) X2 = np.linspace(ymin,", "# %% md # # Plot PCE prediction. 
# %% fig = plt.figure(figsize=(10,6))", "**Input Domain:** This function is evaluated on the hypercube :math:`x_i \\in [-5.12, 5.12]`", "the 2D function. # %% xmin, xmax = -6,6 ymin, ymax = -6,6", "validation sample n_samples = 150 x_val = joint.rvs(n_samples) y_val = function(x_val[:,0], x_val[:,1]) #", "Compute PCE coefficients using LASSO. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree) lasso =", "from UQpy.surrogates import * from UQpy.distributions import Uniform, JointIndependent # %% md #", "(1978). The global optimization problem: an introduction. Towards global optimization, 2, 1-15. \"\"\"", "joint.rvs(n_samples) y = function(x[:,0], x[:,1]) # %% md # # Visualize the 2D", "pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten() # mean relative validation errors error", "on the hypercube :math:`x_i \\in [-5.12, 5.12]` for all :math:`i = 1,2`. **Global", "pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) # %% md # # Compute PCE coefficients", "mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.ticker import LinearLocator, FormatStrFormatter from", "Sinusoidal Function Sphere function (2 random inputs, scalar output) ====================================================================== In this example,", "PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) # %% md # # Compute PCE coefficients with Ridge", "minimum:** :math:`f(x^*)=0,` at :math:`x^* = (0,0)`. **Reference:** <NAME>., & <NAME>. (1978). 
The global", "%% # validation sample n_samples = 150 x_val = joint.rvs(n_samples) y_val = function(x_val[:,0],", "y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean rel.", "[dist_1, dist_2] joint = JointIndependent(marginals=marg) n_samples = 100 x = joint.rvs(n_samples) y =", "pce.predict(x_test) # %% md # # Plot PCE prediction. # %% fig =", "squares regression :', pce.get_moments()) print('Moments from LASSO regression :', pce2.get_moments()) print('Moments from Ridge", "cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,", "FormatStrFormatter from UQpy.surrogates import * from UQpy.distributions import Uniform, JointIndependent # %% md", "print('Mean rel. error, LASSO:', error2) print('Mean rel. error, Ridge:', error3) # %% md", "5.12]` for all :math:`i = 1,2`. **Global minimum:** :math:`f(x^*)=0,` at :math:`x^* = (0,0)`.", "= [dist_1, dist_2] joint = JointIndependent(marginals=marg) n_samples = 100 x = joint.rvs(n_samples) y", "= pce.predict(x_test) # %% md # # Plot PCE prediction. # %% fig", "y_val = function(x_val[:,0], x_val[:,1]) # PCE predictions y_pce = pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten()", "# # Import necessary libraries. 
# %% import numpy as np import matplotlib.pyplot", "= PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) # %% md # # Compute PCE coefficients using", "predictions y_pce = pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten() # mean relative", "# %% xmin, xmax = -6,6 ymin, ymax = -6,6 X1 = np.linspace(xmin,", "Dimensions: 2 **Input Domain:** This function is evaluated on the hypercube :math:`x_i \\in", "ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # Error", "Uniform, JointIndependent # %% md # # Define the function. # %% def", "of the PCE surrogate. # %% n_mc = 1000000 x_mc = joint.rvs(n_mc) y_mc", "print('Moments from Ridge regression :', pce3.get_moments()) print('Moments from Monte Carlo integration: ', mean_mc,", "ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5, aspect=7) plt.show() # %% md", ".. math:: f(x) = x_1^2 + x_2^2 **Description:** Dimensions: 2 **Input Domain:** This", "= PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) # %% md # # PCE surrogate is used", "ymax, 50) X1_, X2_ = np.meshgrid(X1, X2) # grid of points f =", "%% n_mc = 1000000 x_mc = joint.rvs(n_mc) y_mc = function(x_mc[:,0], x_mc[:,1]) mean_mc =", "dist_1 = Uniform(loc=-5.12, scale=10.24) dist_2 = Uniform(loc=-5.12, scale=10.24) marg = [dist_1, dist_2] joint", "regression_method=lasso) pce2.fit(x,y) # %% md # # Compute PCE coefficients with Ridge regression.", "**Description:** Dimensions: 2 **Input Domain:** This function is evaluated on the hypercube :math:`x_i", "this example, PCE is used to generate a surrogate model for a given", "training data. 
# %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y,", "Create a distribution object, generate samples and evaluate the function at the samples.", "xmin, xmax = -6,6 ymin, ymax = -6,6 X1 = np.linspace(xmin, xmax, 50)", "= function(X1_, X2_) fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') surf = ax.plot_surface(X1_, X2_,", "# %% md # # Visualize training data. # %% fig = plt.figure(figsize=(10,6))", "variance of the PCE surrogate. # %% n_mc = 1000000 x_mc = joint.rvs(n_mc)", "RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) # %% md # # PCE surrogate", "# %% import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import", "# # Compute PCE coefficients using LASSO. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree)", "x_test = joint.rvs(n_test_samples) y_test = pce.predict(x_test) # %% md # # Plot PCE", "dist_2] joint = JointIndependent(marginals=marg) n_samples = 100 x = joint.rvs(n_samples) y = function(x[:,0],", "x**2 + y**2 # %% md # # Create a distribution object, generate", "marg = [dist_1, dist_2] joint = JointIndependent(marginals=marg) n_samples = 100 x = joint.rvs(n_samples)", "is evaluated on the hypercube :math:`x_i \\in [-5.12, 5.12]` for all :math:`i =", "= PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) # %% md # # Compute PCE coefficients with", "function(x_val[:,0], x_val[:,1]) # PCE predictions y_pce = pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten() y_pce3 =", "mean and variance of the PCE surrogate. # %% n_mc = 1000000 x_mc", "coefficients with Ridge regression. 
# %% polynomial_basis = TotalDegreeBasis(joint, max_degree) ridge = RidgeRegression()", "function(X1_, X2_) fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') surf = ax.plot_surface(X1_, X2_, f,", "ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # # Create", "= (0,0)`. **Reference:** <NAME>., & <NAME>. (1978). The global optimization problem: an introduction.", "X2 = np.linspace(ymin, ymax, 50) X1_, X2_ = np.meshgrid(X1, X2) # grid of", "mean relative validation errors error = np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val -", "plt.show() # %% md # # Visualize training data. # %% fig =", "samples. # %% n_test_samples = 10000 x_test = joint.rvs(n_test_samples) y_test = pce.predict(x_test) #", "ax = fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6)", "ax = fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y, s=20, c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140)", "%% md # # Create a distribution object, generate samples and evaluate the", "pce3.fit(x,y) # %% md # # PCE surrogate is used to predict the", "validation error. 
# %% # validation sample n_samples = 150 x_val = joint.rvs(n_samples)", "%% np.random.seed(1) dist_1 = Uniform(loc=-5.12, scale=10.24) dist_2 = Uniform(loc=-5.12, scale=10.24) marg = [dist_1,", "ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15)", "# %% md # Moment Estimation # ----------------- # Returns mean and variance", "least_squares = LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) # %% md # #", "= np.linspace(xmin, xmax, 50) X2 = np.linspace(ymin, ymax, 50) X1_, X2_ = np.meshgrid(X1,", "fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5, aspect=7) plt.show() # %% md #", "1000000 x_mc = joint.rvs(n_mc) y_mc = function(x_mc[:,0], x_mc[:,1]) mean_mc = np.mean(y_mc) var_mc =", "xmax = -6,6 ymin, ymax = -6,6 X1 = np.linspace(xmin, xmax, 50) X2", "+ y**2 # %% md # # Create a distribution object, generate samples", "points f = function(X1_, X2_) fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') surf =", "import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D from", "====================================================================== In this example, PCE is used to generate a surrogate model for", "dist_2 = Uniform(loc=-5.12, scale=10.24) marg = [dist_1, dist_2] joint = JointIndependent(marginals=marg) n_samples =", "from mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.ticker import LinearLocator, FormatStrFormatter", "mean_mc = np.mean(y_mc) var_mc = np.var(y_mc) print('Moments from least squares regression :', pce.get_moments())", "(0,0)`. 
**Reference:** <NAME>., & <NAME>. (1978). The global optimization problem: an introduction. Towards", "= np.mean(y_mc) var_mc = np.var(y_mc) print('Moments from least squares regression :', pce.get_moments()) print('Moments", "to predict the behavior of the function at new samples. # %% n_test_samples", "Estimation # ----------------- # Returns mean and variance of the PCE surrogate. #", "+ x_2^2 **Description:** Dimensions: 2 **Input Domain:** This function is evaluated on the", "least squares regression. # %% max_degree = 3 polynomial_basis = TotalDegreeBasis(joint, max_degree) least_squares", "= JointIndependent(marginals=marg) n_samples = 100 x = joint.rvs(n_samples) y = function(x[:,0], x[:,1]) #", "Plot PCE prediction. # %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1],", "with Ridge regression. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree) ridge = RidgeRegression() pce3", "pce.get_moments()) print('Moments from LASSO regression :', pce2.get_moments()) print('Moments from Ridge regression :', pce3.get_moments())", "data. # %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y, s=20,", "TotalDegreeBasis(joint, max_degree) lasso = LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) # %% md", "least squares regression :', pce.get_moments()) print('Moments from LASSO regression :', pce2.get_moments()) print('Moments from", "- y_pce3)/y_val))/n_samples print('Mean rel. error, LSTSQ:', error) print('Mean rel. error, LASSO:', error2) print('Mean", "md # # Visualize the 2D function. # %% xmin, xmax = -6,6", "np.var(y_mc) print('Moments from least squares regression :', pce.get_moments()) print('Moments from LASSO regression :',", "# %% md # # Define the function. # %% def function(x,y): return", "UQpy.distributions import Uniform, JointIndependent # %% md # # Define the function. 
#", "= 3 polynomial_basis = TotalDegreeBasis(joint, max_degree) least_squares = LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares)", "ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() #", ":', pce2.get_moments()) print('Moments from Ridge regression :', pce3.get_moments()) print('Moments from Monte Carlo integration:", "= joint.rvs(n_mc) y_mc = function(x_mc[:,0], x_mc[:,1]) mean_mc = np.mean(y_mc) var_mc = np.var(y_mc) print('Moments", "%% md # # Import necessary libraries. # %% import numpy as np", "X2) # grid of points f = function(X1_, X2_) fig = plt.figure(figsize=(10,6)) ax", "# Create a distribution object, generate samples and evaluate the function at the", "pce2.get_moments()) print('Moments from Ridge regression :', pce3.get_moments()) print('Moments from Monte Carlo integration: ',", "error, LSTSQ:', error) print('Mean rel. error, LASSO:', error2) print('Mean rel. error, Ridge:', error3)", "# %% md # # Create an object from the PCE class. Compute", "ymin, ymax = -6,6 X1 = np.linspace(xmin, xmax, 50) X2 = np.linspace(ymin, ymax,", "the hypercube :math:`x_i \\in [-5.12, 5.12]` for all :math:`i = 1,2`. 
**Global minimum:**", "as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D from matplotlib import", "JointIndependent(marginals=marg) n_samples = 100 x = joint.rvs(n_samples) y = function(x[:,0], x[:,1]) # %%", "Moment Estimation # ----------------- # Returns mean and variance of the PCE surrogate.", "linewidth=0, antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf,", "the function. # %% def function(x,y): return x**2 + y**2 # %% md", "the function at the samples. # %% np.random.seed(1) dist_1 = Uniform(loc=-5.12, scale=10.24) dist_2", "joint.rvs(n_mc) y_mc = function(x_mc[:,0], x_mc[:,1]) mean_mc = np.mean(y_mc) var_mc = np.var(y_mc) print('Moments from", "# %% md # Error Estimation # ----------------- # Construct a validation dataset", ":', pce.get_moments()) print('Moments from LASSO regression :', pce2.get_moments()) print('Moments from Ridge regression :',", "140) fig.colorbar(surf, shrink=0.5, aspect=7) plt.show() # %% md # # Visualize training data.", "150 x_val = joint.rvs(n_samples) y_val = function(x_val[:,0], x_val[:,1]) # PCE predictions y_pce =", "= 150 x_val = joint.rvs(n_samples) y_val = function(x_val[:,0], x_val[:,1]) # PCE predictions y_pce", "= plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y, s=20, c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10))", "a validation dataset and get the validation error. 
# %% # validation sample", "50) X2 = np.linspace(ymin, ymax, 50) X1_, X2_ = np.meshgrid(X1, X2) # grid", "%% md # Error Estimation # ----------------- # Construct a validation dataset and", "x_test[:,1], y_test, s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$',", "x[:,1], y, s=20, c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15)", "# %% polynomial_basis = TotalDegreeBasis(joint, max_degree) lasso = LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso)", "regression :', pce.get_moments()) print('Moments from LASSO regression :', pce2.get_moments()) print('Moments from Ridge regression", "= RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) # %% md # # PCE", "evaluated on the hypercube :math:`x_i \\in [-5.12, 5.12]` for all :math:`i = 1,2`.", "# Compute PCE coefficients with Ridge regression. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree)", "# # Visualize the 2D function. # %% xmin, xmax = -6,6 ymin,", "surrogate. # %% n_mc = 1000000 x_mc = joint.rvs(n_mc) y_mc = function(x_mc[:,0], x_mc[:,1])", "f(x) = x_1^2 + x_2^2 **Description:** Dimensions: 2 **Input Domain:** This function is", "%% md # # Define the function. # %% def function(x,y): return x**2", "# %% md # # Import necessary libraries. 
# %% import numpy as", "fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y, s=20, c='r') ax.set_title('Training data')", "ax = fig.gca(projection='3d') surf = ax.plot_surface(X1_, X2_, f, rstride=1, cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False)", "function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5, aspect=7) plt.show()", "# %% md # # Compute PCE coefficients using LASSO. # %% polynomial_basis", "scale=10.24) dist_2 = Uniform(loc=-5.12, scale=10.24) marg = [dist_1, dist_2] joint = JointIndependent(marginals=marg) n_samples", "= plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))", "# ----------------- # Construct a validation dataset and get the validation error. #", "antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5,", "# %% max_degree = 3 polynomial_basis = TotalDegreeBasis(joint, max_degree) least_squares = LeastSquareRegression() pce", "polynomial_basis = TotalDegreeBasis(joint, max_degree) ridge = RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) #", "y, s=20, c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show()", "coefficients using LASSO. 
# %% polynomial_basis = TotalDegreeBasis(joint, max_degree) lasso = LassoRegression() pce2", "joint.rvs(n_samples) y_val = function(x_val[:,0], x_val[:,1]) # PCE predictions y_pce = pce.predict(x_val).flatten() y_pce2 =", "# %% md # # Create a distribution object, generate samples and evaluate", "regression. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree) ridge = RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis,", "validation errors error = np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3", "and variance of the PCE surrogate. # %% n_mc = 1000000 x_mc =", "md # # Import necessary libraries. # %% import numpy as np import", "function at the samples. # %% np.random.seed(1) dist_1 = Uniform(loc=-5.12, scale=10.24) dist_2 =", "# Visualize training data. # %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x[:,0],", "max_degree) least_squares = LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) # %% md #", "as plt from mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.ticker import", "fontsize=15) plt.show() # %% md # # Create an object from the PCE", "# %% md # # Compute PCE coefficients with Ridge regression. # %%", "function(x_mc[:,0], x_mc[:,1]) mean_mc = np.mean(y_mc) var_mc = np.var(y_mc) print('Moments from least squares regression", "coefficients using least squares regression. # %% max_degree = 3 polynomial_basis = TotalDegreeBasis(joint,", "Error Estimation # ----------------- # Construct a validation dataset and get the validation", "md # # Compute PCE coefficients with Ridge regression. # %% polynomial_basis =", "= x_1^2 + x_2^2 **Description:** Dimensions: 2 **Input Domain:** This function is evaluated", "hypercube :math:`x_i \\in [-5.12, 5.12]` for all :math:`i = 1,2`. 
**Global minimum:** :math:`f(x^*)=0,`", "# Plot PCE prediction. # %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x_test[:,0],", "PCE prediction. # %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test,", "y_pce = pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten() # mean relative validation", "- y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean rel. error, LSTSQ:', error) print('Mean", "%% def function(x,y): return x**2 + y**2 # %% md # # Create", "fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y, s=20, c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15)", "X2_) fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') surf = ax.plot_surface(X1_, X2_, f, rstride=1,", "x_1^2 + x_2^2 **Description:** Dimensions: 2 **Input Domain:** This function is evaluated on", "x_val = joint.rvs(n_samples) y_val = function(x_val[:,0], x_val[:,1]) # PCE predictions y_pce = pce.predict(x_val).flatten()", "# Construct a validation dataset and get the validation error. # %% #", "The global optimization problem: an introduction. Towards global optimization, 2, 1-15. \"\"\" #", "# %% md # # PCE surrogate is used to predict the behavior", "3 polynomial_basis = TotalDegreeBasis(joint, max_degree) least_squares = LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y)", "# # Visualize training data. # %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d')", "pce.fit(x,y) # %% md # # Compute PCE coefficients using LASSO. # %%", ":math:`f(x^*)=0,` at :math:`x^* = (0,0)`. **Reference:** <NAME>., & <NAME>. (1978). 
The global optimization", "predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %%", "the samples. # %% np.random.seed(1) dist_1 = Uniform(loc=-5.12, scale=10.24) dist_2 = Uniform(loc=-5.12, scale=10.24)", "In this example, PCE is used to generate a surrogate model for a", "# Compute PCE coefficients using LASSO. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree) lasso", "Visualize training data. # %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1],", "var_mc = np.var(y_mc) print('Moments from least squares regression :', pce.get_moments()) print('Moments from LASSO", "from least squares regression :', pce.get_moments()) print('Moments from LASSO regression :', pce2.get_moments()) print('Moments", "joint.rvs(n_test_samples) y_test = pce.predict(x_test) # %% md # # Plot PCE prediction. #", "distribution object, generate samples and evaluate the function at the samples. # %%", "function(x,y): return x**2 + y**2 # %% md # # Create a distribution", "ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # # Create an object from the", "rel. error, LSTSQ:', error) print('Mean rel. error, LASSO:', error2) print('Mean rel. 
error, Ridge:',", "= ax.plot_surface(X1_, X2_, f, rstride=1, cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15)", "# %% n_mc = 1000000 x_mc = joint.rvs(n_mc) y_mc = function(x_mc[:,0], x_mc[:,1]) mean_mc", "= function(x_val[:,0], x_val[:,1]) # PCE predictions y_pce = pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten() y_pce3", "from Ridge regression :', pce3.get_moments()) print('Moments from Monte Carlo integration: ', mean_mc, var_mc)", "is used to generate a surrogate model for a given set of 2D", "matplotlib import cm from matplotlib.ticker import LinearLocator, FormatStrFormatter from UQpy.surrogates import * from", "a surrogate model for a given set of 2D data. .. math:: f(x)", "(2 random inputs, scalar output) ====================================================================== In this example, PCE is used to", "= np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean rel. error, LSTSQ:', error) print('Mean rel. error, LASSO:',", "at new samples. # %% n_test_samples = 10000 x_test = joint.rvs(n_test_samples) y_test =", "error2) print('Mean rel. error, Ridge:', error3) # %% md # Moment Estimation #", "pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) # %% md # # Compute PCE coefficients", "x_mc[:,1]) mean_mc = np.mean(y_mc) var_mc = np.var(y_mc) print('Moments from least squares regression :',", "# PCE surrogate is used to predict the behavior of the function at", "1,2`. **Global minimum:** :math:`f(x^*)=0,` at :math:`x^* = (0,0)`. **Reference:** <NAME>., & <NAME>. (1978).", "class. Compute PCE coefficients using least squares regression. # %% max_degree = 3", "and evaluate the function at the samples. 
# %% np.random.seed(1) dist_1 = Uniform(loc=-5.12,", "X2_, f, rstride=1, cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15)", "%% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1) ax.set_title('PCE predictor')", "ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # Error Estimation", "shrink=0.5, aspect=7) plt.show() # %% md # # Visualize training data. # %%", "# PCE predictions y_pce = pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten() #", "aspect=7) plt.show() # %% md # # Visualize training data. # %% fig", "%% xmin, xmax = -6,6 ymin, ymax = -6,6 X1 = np.linspace(xmin, xmax,", "UQpy.surrogates import * from UQpy.distributions import Uniform, JointIndependent # %% md # #", "= -6,6 X1 = np.linspace(xmin, xmax, 50) X2 = np.linspace(ymin, ymax, 50) X1_,", "%% md # # PCE surrogate is used to predict the behavior of", "print('Moments from least squares regression :', pce.get_moments()) print('Moments from LASSO regression :', pce2.get_moments())", "ymax = -6,6 X1 = np.linspace(xmin, xmax, 50) X2 = np.linspace(ymin, ymax, 50)", "an introduction. Towards global optimization, 2, 1-15. \"\"\" # %% md # #", "regression_method=least_squares) pce.fit(x,y) # %% md # # Compute PCE coefficients using LASSO. #", "random inputs, scalar output) ====================================================================== In this example, PCE is used to generate", "= joint.rvs(n_test_samples) y_test = pce.predict(x_test) # %% md # # Plot PCE prediction.", "Estimation # ----------------- # Construct a validation dataset and get the validation error.", "# %% def function(x,y): return x**2 + y**2 # %% md # #", "given set of 2D data. .. 
math:: f(x) = x_1^2 + x_2^2 **Description:**", "max_degree = 3 polynomial_basis = TotalDegreeBasis(joint, max_degree) least_squares = LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis,", "Define the function. # %% def function(x,y): return x**2 + y**2 # %%", "scalar output) ====================================================================== In this example, PCE is used to generate a surrogate", "Domain:** This function is evaluated on the hypercube :math:`x_i \\in [-5.12, 5.12]` for", "# %% np.random.seed(1) dist_1 = Uniform(loc=-5.12, scale=10.24) dist_2 = Uniform(loc=-5.12, scale=10.24) marg =", "& <NAME>. (1978). The global optimization problem: an introduction. Towards global optimization, 2,", "Construct a validation dataset and get the validation error. # %% # validation", "model for a given set of 2D data. .. math:: f(x) = x_1^2", "optimization problem: an introduction. Towards global optimization, 2, 1-15. \"\"\" # %% md", "This function is evaluated on the hypercube :math:`x_i \\in [-5.12, 5.12]` for all", "ax.scatter(x[:,0], x[:,1], y, s=20, c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$',", "# Returns mean and variance of the PCE surrogate. # %% n_mc =", "is used to predict the behavior of the function at new samples. #", "md # # Define the function. # %% def function(x,y): return x**2 +", "n_samples = 100 x = joint.rvs(n_samples) y = function(x[:,0], x[:,1]) # %% md", "%% md # # Create an object from the PCE class. Compute PCE", "md # # Create a distribution object, generate samples and evaluate the function", "matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.ticker", "object, generate samples and evaluate the function at the samples. 
# %% np.random.seed(1)", "cm from matplotlib.ticker import LinearLocator, FormatStrFormatter from UQpy.surrogates import * from UQpy.distributions import", "return x**2 + y**2 # %% md # # Create a distribution object,", "= Uniform(loc=-5.12, scale=10.24) dist_2 = Uniform(loc=-5.12, scale=10.24) marg = [dist_1, dist_2] joint =", "1-15. \"\"\" # %% md # # Import necessary libraries. # %% import", "= -6,6 ymin, ymax = -6,6 X1 = np.linspace(xmin, xmax, 50) X2 =", "of 2D data. .. math:: f(x) = x_1^2 + x_2^2 **Description:** Dimensions: 2", "%% polynomial_basis = TotalDegreeBasis(joint, max_degree) lasso = LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y)", "prediction. # %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1)", "ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md #", "from matplotlib.ticker import LinearLocator, FormatStrFormatter from UQpy.surrogates import * from UQpy.distributions import Uniform,", "np.meshgrid(X1, X2) # grid of points f = function(X1_, X2_) fig = plt.figure(figsize=(10,6))", "= pce3.predict(x_val).flatten() # mean relative validation errors error = np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2", "# # Create an object from the PCE class. Compute PCE coefficients using", "PCE coefficients with Ridge regression. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree) ridge =", "np.linspace(xmin, xmax, 50) X2 = np.linspace(ymin, ymax, 50) X1_, X2_ = np.meshgrid(X1, X2)", "x[:,1]) # %% md # # Visualize the 2D function. # %% xmin,", "= function(x[:,0], x[:,1]) # %% md # # Visualize the 2D function. #", "%% md # # Compute PCE coefficients using LASSO. # %% polynomial_basis =", "dataset and get the validation error. 
# %% # validation sample n_samples =", "PCE is used to generate a surrogate model for a given set of", "= np.meshgrid(X1, X2) # grid of points f = function(X1_, X2_) fig =", "%% polynomial_basis = TotalDegreeBasis(joint, max_degree) ridge = RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y)", "fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$',", "np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D from matplotlib import cm", "PCE surrogate is used to predict the behavior of the function at new", "# # Compute PCE coefficients with Ridge regression. # %% polynomial_basis = TotalDegreeBasis(joint,", "s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show()", "ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # Error Estimation #", "ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md", "= LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) # %% md # # Compute", "md # # Plot PCE prediction. # %% fig = plt.figure(figsize=(10,6)) ax =", "Visualize the 2D function. # %% xmin, xmax = -6,6 ymin, ymax =", "the function at new samples. 
# %% n_test_samples = 10000 x_test = joint.rvs(n_test_samples)", "PCE coefficients using LASSO. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree) lasso = LassoRegression()", "= Uniform(loc=-5.12, scale=10.24) marg = [dist_1, dist_2] joint = JointIndependent(marginals=marg) n_samples = 100", "md # # Visualize training data. # %% fig = plt.figure(figsize=(10,6)) ax =", "necessary libraries. # %% import numpy as np import matplotlib.pyplot as plt from", "fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # Error Estimation # ----------------- #", "and get the validation error. # %% # validation sample n_samples = 150", "pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) # %% md # # PCE surrogate is", "ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlim(-6,6) ax.set_ylim(-6,6) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md", "a distribution object, generate samples and evaluate the function at the samples. #", "fig.colorbar(surf, shrink=0.5, aspect=7) plt.show() # %% md # # Visualize training data. #", "# %% polynomial_basis = TotalDegreeBasis(joint, max_degree) ridge = RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge)", "= plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') surf = ax.plot_surface(X1_, X2_, f, rstride=1, cstride=1, cmap='gnuplot2',", "ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # Error Estimation # ----------------- # Construct", "global optimization problem: an introduction. Towards global optimization, 2, 1-15. 
\"\"\" # %%", "np.random.seed(1) dist_1 = Uniform(loc=-5.12, scale=10.24) dist_2 = Uniform(loc=-5.12, scale=10.24) marg = [dist_1, dist_2]", "ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5, aspect=7) plt.show() # %% md # # Visualize", "= 1000000 x_mc = joint.rvs(n_mc) y_mc = function(x_mc[:,0], x_mc[:,1]) mean_mc = np.mean(y_mc) var_mc", "----------------- # Construct a validation dataset and get the validation error. # %%", "error3) # %% md # Moment Estimation # ----------------- # Returns mean and", "sample n_samples = 150 x_val = joint.rvs(n_samples) y_val = function(x_val[:,0], x_val[:,1]) # PCE", "ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # # Create an", "-6,6 X1 = np.linspace(xmin, xmax, 50) X2 = np.linspace(ymin, ymax, 50) X1_, X2_", "X1 = np.linspace(xmin, xmax, 50) X2 = np.linspace(ymin, ymax, 50) X1_, X2_ =", "Import necessary libraries. # %% import numpy as np import matplotlib.pyplot as plt", "ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5, aspect=7) plt.show() #", "x_val[:,1]) # PCE predictions y_pce = pce.predict(x_val).flatten() y_pce2 = pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten()", "Towards global optimization, 2, 1-15. \"\"\" # %% md # # Import necessary", "2 **Input Domain:** This function is evaluated on the hypercube :math:`x_i \\in [-5.12,", "2, 1-15. \"\"\" # %% md # # Import necessary libraries. # %%", "squares regression. # %% max_degree = 3 polynomial_basis = TotalDegreeBasis(joint, max_degree) least_squares =", "Uniform(loc=-5.12, scale=10.24) marg = [dist_1, dist_2] joint = JointIndependent(marginals=marg) n_samples = 100 x", "rel. error, LASSO:', error2) print('Mean rel. 
error, Ridge:', error3) # %% md #", "x_mc = joint.rvs(n_mc) y_mc = function(x_mc[:,0], x_mc[:,1]) mean_mc = np.mean(y_mc) var_mc = np.var(y_mc)", "from the PCE class. Compute PCE coefficients using least squares regression. # %%", "md # # Create an object from the PCE class. Compute PCE coefficients", "plt from mpl_toolkits.mplot3d import Axes3D from matplotlib import cm from matplotlib.ticker import LinearLocator,", "scale=10.24) marg = [dist_1, dist_2] joint = JointIndependent(marginals=marg) n_samples = 100 x =", "= TotalDegreeBasis(joint, max_degree) least_squares = LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) # %%", "# validation sample n_samples = 150 x_val = joint.rvs(n_samples) y_val = function(x_val[:,0], x_val[:,1])", "x_2^2 **Description:** Dimensions: 2 **Input Domain:** This function is evaluated on the hypercube", "function at new samples. # %% n_test_samples = 10000 x_test = joint.rvs(n_test_samples) y_test", "np.mean(y_mc) var_mc = np.var(y_mc) print('Moments from least squares regression :', pce.get_moments()) print('Moments from", "ax.plot_surface(X1_, X2_, f, rstride=1, cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$',", ":math:`i = 1,2`. **Global minimum:** :math:`f(x^*)=0,` at :math:`x^* = (0,0)`. **Reference:** <NAME>., &", "object from the PCE class. Compute PCE coefficients using least squares regression. #", "PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) # %% md # # Compute PCE coefficients using LASSO.", "md # # Compute PCE coefficients using LASSO. 
# %% polynomial_basis = TotalDegreeBasis(joint,", "n_test_samples = 10000 x_test = joint.rvs(n_test_samples) y_test = pce.predict(x_test) # %% md #", "function is evaluated on the hypercube :math:`x_i \\in [-5.12, 5.12]` for all :math:`i", "%% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y, s=20, c='r') ax.set_title('Training", "# # Plot PCE prediction. # %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d')", "c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %%", "<NAME>. (1978). The global optimization problem: an introduction. Towards global optimization, 2, 1-15.", "rel. error, Ridge:', error3) # %% md # Moment Estimation # ----------------- #", "np.linspace(ymin, ymax, 50) X1_, X2_ = np.meshgrid(X1, X2) # grid of points f", "data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md #", "Compute PCE coefficients with Ridge regression. # %% polynomial_basis = TotalDegreeBasis(joint, max_degree) ridge", "ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5, aspect=7)", "pce2.fit(x,y) # %% md # # Compute PCE coefficients with Ridge regression. #", "f, rstride=1, cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10))", "function. 
# %% xmin, xmax = -6,6 ymin, ymax = -6,6 X1 =", "\\in [-5.12, 5.12]` for all :math:`i = 1,2`. **Global minimum:** :math:`f(x^*)=0,` at :math:`x^*", "%% md # Moment Estimation # ----------------- # Returns mean and variance of", "error3 = np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean rel. error, LSTSQ:', error) print('Mean rel. error,", "= LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) # %% md # # Compute", "polynomial_basis = TotalDegreeBasis(joint, max_degree) least_squares = LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) #", "max_degree) lasso = LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) # %% md #", "ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # Error Estimation # -----------------", "y_pce3 = pce3.predict(x_val).flatten() # mean relative validation errors error = np.sum(np.abs((y_val - y_pce)/y_val))/n_samples", "= 1,2`. **Global minimum:** :math:`f(x^*)=0,` at :math:`x^* = (0,0)`. **Reference:** <NAME>., & <NAME>.", "plt.show() # %% md # # Create an object from the PCE class.", "fig.gca(projection='3d') surf = ax.plot_surface(X1_, X2_, f, rstride=1, cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True function')", "%% md # # Compute PCE coefficients with Ridge regression. 
# %% polynomial_basis", "matplotlib.ticker import LinearLocator, FormatStrFormatter from UQpy.surrogates import * from UQpy.distributions import Uniform, JointIndependent", "Uniform(loc=-5.12, scale=10.24) dist_2 = Uniform(loc=-5.12, scale=10.24) marg = [dist_1, dist_2] joint = JointIndependent(marginals=marg)", "function (2 random inputs, scalar output) ====================================================================== In this example, PCE is used", "plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x[:,0], x[:,1], y, s=20, c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))", "md # Moment Estimation # ----------------- # Returns mean and variance of the", "= pce2.predict(x_val).flatten() y_pce3 = pce3.predict(x_val).flatten() # mean relative validation errors error = np.sum(np.abs((y_val", "inputs, scalar output) ====================================================================== In this example, PCE is used to generate a", "s=20, c='r') ax.set_title('Training data') ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() #", "LinearLocator, FormatStrFormatter from UQpy.surrogates import * from UQpy.distributions import Uniform, JointIndependent # %%", "# %% fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1) ax.set_title('PCE", "= TotalDegreeBasis(joint, max_degree) lasso = LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) # %%", "PCE class. Compute PCE coefficients using least squares regression. # %% max_degree =", "introduction. Towards global optimization, 2, 1-15. \"\"\" # %% md # # Import", "def function(x,y): return x**2 + y**2 # %% md # # Create a", "samples. 
# %% np.random.seed(1) dist_1 = Uniform(loc=-5.12, scale=10.24) dist_2 = Uniform(loc=-5.12, scale=10.24) marg", "pce3.predict(x_val).flatten() # mean relative validation errors error = np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2 =", "the PCE class. Compute PCE coefficients using least squares regression. # %% max_degree", "LASSO regression :', pce2.get_moments()) print('Moments from Ridge regression :', pce3.get_moments()) print('Moments from Monte", "for a given set of 2D data. .. math:: f(x) = x_1^2 +", "n_samples = 150 x_val = joint.rvs(n_samples) y_val = function(x_val[:,0], x_val[:,1]) # PCE predictions", "data. .. math:: f(x) = x_1^2 + x_2^2 **Description:** Dimensions: 2 **Input Domain:**", "at the samples. # %% np.random.seed(1) dist_1 = Uniform(loc=-5.12, scale=10.24) dist_2 = Uniform(loc=-5.12,", ":math:`x^* = (0,0)`. **Reference:** <NAME>., & <NAME>. (1978). The global optimization problem: an", "= fig.gca(projection='3d') surf = ax.plot_surface(X1_, X2_, f, rstride=1, cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True", "\"\"\" # %% md # # Import necessary libraries. # %% import numpy", "y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean rel. error, LSTSQ:', error) print('Mean rel.", "error, Ridge:', error3) # %% md # Moment Estimation # ----------------- # Returns", "global optimization, 2, 1-15. \"\"\" # %% md # # Import necessary libraries.", "# Visualize the 2D function. # %% xmin, xmax = -6,6 ymin, ymax", "predict the behavior of the function at new samples. # %% n_test_samples =", "ax.view_init(20, 140) fig.colorbar(surf, shrink=0.5, aspect=7) plt.show() # %% md # # Visualize training", "regression :', pce2.get_moments()) print('Moments from Ridge regression :', pce3.get_moments()) print('Moments from Monte Carlo", "all :math:`i = 1,2`. **Global minimum:** :math:`f(x^*)=0,` at :math:`x^* = (0,0)`. 
**Reference:** <NAME>.,", "rstride=1, cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))", "fig = plt.figure(figsize=(10,6)) ax = fig.gca(projection='3d') ax.scatter(x_test[:,0], x_test[:,1], y_test, s=1) ax.set_title('PCE predictor') ax.zaxis.set_major_locator(LinearLocator(10))", "joint = JointIndependent(marginals=marg) n_samples = 100 x = joint.rvs(n_samples) y = function(x[:,0], x[:,1])", "= 100 x = joint.rvs(n_samples) y = function(x[:,0], x[:,1]) # %% md #", "from LASSO regression :', pce2.get_moments()) print('Moments from Ridge regression :', pce3.get_moments()) print('Moments from", "[-5.12, 5.12]` for all :math:`i = 1,2`. **Global minimum:** :math:`f(x^*)=0,` at :math:`x^* =", "Function Sphere function (2 random inputs, scalar output) ====================================================================== In this example, PCE", "error, LASSO:', error2) print('Mean rel. error, Ridge:', error3) # %% md # Moment", "y_test = pce.predict(x_test) # %% md # # Plot PCE prediction. # %%", "2D function. # %% xmin, xmax = -6,6 ymin, ymax = -6,6 X1", "TotalDegreeBasis(joint, max_degree) ridge = RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) # %% md", "y_mc = function(x_mc[:,0], x_mc[:,1]) mean_mc = np.mean(y_mc) var_mc = np.var(y_mc) print('Moments from least", "# # Define the function. # %% def function(x,y): return x**2 + y**2", "regression. # %% max_degree = 3 polynomial_basis = TotalDegreeBasis(joint, max_degree) least_squares = LeastSquareRegression()", "Returns mean and variance of the PCE surrogate. 
# %% n_mc = 1000000", "Ridge:', error3) # %% md # Moment Estimation # ----------------- # Returns mean", "= np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean rel. error, LSTSQ:',", "Axes3D from matplotlib import cm from matplotlib.ticker import LinearLocator, FormatStrFormatter from UQpy.surrogates import", "surf = ax.plot_surface(X1_, X2_, f, rstride=1, cstride=1, cmap='gnuplot2', linewidth=0, antialiased=False) ax.set_title('True function') ax.set_xlabel('$x_1$',", "print('Moments from LASSO regression :', pce2.get_moments()) print('Moments from Ridge regression :', pce3.get_moments()) print('Moments", "from UQpy.distributions import Uniform, JointIndependent # %% md # # Define the function.", "generate samples and evaluate the function at the samples. # %% np.random.seed(1) dist_1", "%% max_degree = 3 polynomial_basis = TotalDegreeBasis(joint, max_degree) least_squares = LeastSquareRegression() pce =", "import Uniform, JointIndependent # %% md # # Define the function. # %%", "LeastSquareRegression() pce = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=least_squares) pce.fit(x,y) # %% md # # Compute PCE", "behavior of the function at new samples. # %% n_test_samples = 10000 x_test", "x = joint.rvs(n_samples) y = function(x[:,0], x[:,1]) # %% md # # Visualize", "numpy as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D from matplotlib", "of the function at new samples. # %% n_test_samples = 10000 x_test =", "ridge = RidgeRegression() pce3 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=ridge) pce3.fit(x,y) # %% md # #", "# # Create a distribution object, generate samples and evaluate the function at", "validation dataset and get the validation error. # %% # validation sample n_samples", "using least squares regression. 
# %% max_degree = 3 polynomial_basis = TotalDegreeBasis(joint, max_degree)", "LassoRegression() pce2 = PolynomialChaosExpansion(polynomial_basis=polynomial_basis, regression_method=lasso) pce2.fit(x,y) # %% md # # Compute PCE", "%% md # # Plot PCE prediction. # %% fig = plt.figure(figsize=(10,6)) ax", "y = function(x[:,0], x[:,1]) # %% md # # Visualize the 2D function.", "- y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean", "np.sum(np.abs((y_val - y_pce)/y_val))/n_samples error2 = np.sum(np.abs((y_val - y_pce2)/y_val))/n_samples error3 = np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples", "xmax, 50) X2 = np.linspace(ymin, ymax, 50) X1_, X2_ = np.meshgrid(X1, X2) #", "output) ====================================================================== In this example, PCE is used to generate a surrogate model", "md # Error Estimation # ----------------- # Construct a validation dataset and get", "= function(x_mc[:,0], x_mc[:,1]) mean_mc = np.mean(y_mc) var_mc = np.var(y_mc) print('Moments from least squares", "# ----------------- # Returns mean and variance of the PCE surrogate. # %%", "y**2 # %% md # # Create a distribution object, generate samples and", "print('Mean rel. error, Ridge:', error3) # %% md # Moment Estimation # -----------------", "100 x = joint.rvs(n_samples) y = function(x[:,0], x[:,1]) # %% md # #", "%% md # # Visualize training data. # %% fig = plt.figure(figsize=(10,6)) ax", "to generate a surrogate model for a given set of 2D data. ..", "10000 x_test = joint.rvs(n_test_samples) y_test = pce.predict(x_test) # %% md # # Plot", "ax.zaxis.set_major_locator(LinearLocator(10)) ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f')) ax.view_init(20,140) ax.set_xlabel('$x_1$', fontsize=15) ax.set_ylabel('$x_2$', fontsize=15) plt.show() # %% md # #", "the validation error. 
# %% # validation sample n_samples = 150 x_val =", "# %% # validation sample n_samples = 150 x_val = joint.rvs(n_samples) y_val =", "# %% n_test_samples = 10000 x_test = joint.rvs(n_test_samples) y_test = pce.predict(x_test) # %%", "error) print('Mean rel. error, LASSO:', error2) print('Mean rel. error, Ridge:', error3) # %%", "import cm from matplotlib.ticker import LinearLocator, FormatStrFormatter from UQpy.surrogates import * from UQpy.distributions", "# Create an object from the PCE class. Compute PCE coefficients using least", "get the validation error. # %% # validation sample n_samples = 150 x_val", "math:: f(x) = x_1^2 + x_2^2 **Description:** Dimensions: 2 **Input Domain:** This function", "function. # %% def function(x,y): return x**2 + y**2 # %% md #", "2D data. .. math:: f(x) = x_1^2 + x_2^2 **Description:** Dimensions: 2 **Input", "\"\"\" Sinusoidal Function Sphere function (2 random inputs, scalar output) ====================================================================== In this", "the PCE surrogate. # %% n_mc = 1000000 x_mc = joint.rvs(n_mc) y_mc =", ":math:`x_i \\in [-5.12, 5.12]` for all :math:`i = 1,2`. **Global minimum:** :math:`f(x^*)=0,` at", "np.sum(np.abs((y_val - y_pce3)/y_val))/n_samples print('Mean rel. error, LSTSQ:', error) print('Mean rel. error, LASSO:', error2)", "**Global minimum:** :math:`f(x^*)=0,` at :math:`x^* = (0,0)`. **Reference:** <NAME>., & <NAME>. (1978). The", "JointIndependent # %% md # # Define the function. # %% def function(x,y):", "samples and evaluate the function at the samples. # %% np.random.seed(1) dist_1 =" ]
[ "if lista[i] >= tempmax: temp = lista[i] -tempmax +1 count += temp lista[i]", "i in range(n): lista.append(int(input())) tempmax = lista[-1] count = 0 for i in", "count = 0 for i in range(n-2,-1,-1): if lista[i] >= tempmax: temp =", "in range(n): lista.append(int(input())) tempmax = lista[-1] count = 0 for i in range(n-2,-1,-1):", "for i in range(n): lista.append(int(input())) tempmax = lista[-1] count = 0 for i", "= lista[i] -tempmax +1 count += temp lista[i] -= temp tempmax = lista[i]", "lista[-1] count = 0 for i in range(n-2,-1,-1): if lista[i] >= tempmax: temp", "= lista[-1] count = 0 for i in range(n-2,-1,-1): if lista[i] >= tempmax:", "int(input()) lista= [] for i in range(n): lista.append(int(input())) tempmax = lista[-1] count =", "lista= [] for i in range(n): lista.append(int(input())) tempmax = lista[-1] count = 0", "0 for i in range(n-2,-1,-1): if lista[i] >= tempmax: temp = lista[i] -tempmax", "lista.append(int(input())) tempmax = lista[-1] count = 0 for i in range(n-2,-1,-1): if lista[i]", "range(n-2,-1,-1): if lista[i] >= tempmax: temp = lista[i] -tempmax +1 count += temp", "lista[i] >= tempmax: temp = lista[i] -tempmax +1 count += temp lista[i] -=", "n = int(input()) lista= [] for i in range(n): lista.append(int(input())) tempmax = lista[-1]", "range(n): lista.append(int(input())) tempmax = lista[-1] count = 0 for i in range(n-2,-1,-1): if", "for i in range(n-2,-1,-1): if lista[i] >= tempmax: temp = lista[i] -tempmax +1", "in range(n-2,-1,-1): if lista[i] >= tempmax: temp = lista[i] -tempmax +1 count +=", "temp = lista[i] -tempmax +1 count += temp lista[i] -= temp tempmax =", "lista[i] -tempmax +1 count += temp lista[i] -= temp tempmax = lista[i] print(count)", "[] for i in range(n): lista.append(int(input())) tempmax = lista[-1] count = 0 for", "tempmax = lista[-1] count = 0 for i in range(n-2,-1,-1): if lista[i] >=", "tempmax: temp = lista[i] -tempmax +1 count += temp lista[i] -= temp tempmax", "i in range(n-2,-1,-1): if lista[i] >= tempmax: temp 
= lista[i] -tempmax +1 count", "= 0 for i in range(n-2,-1,-1): if lista[i] >= tempmax: temp = lista[i]", ">= tempmax: temp = lista[i] -tempmax +1 count += temp lista[i] -= temp", "= int(input()) lista= [] for i in range(n): lista.append(int(input())) tempmax = lista[-1] count" ]
[ "q q = q + h * p vp.append(p) vq.append(q) plt.plot(vq, vp) plt.savefig(\"sym1.png\")", "1.0 p = 0.0 for i in range(1000): p = p - h", "in range(1000): p = p - h * q q = q +", "= 0.0 for i in range(1000): p = p - h * q", "for i in range(1000): p = p - h * q q =", "= 1.0 p = 0.0 for i in range(1000): p = p -", "as plt vq = [] vp = [] h = 0.05 q =", "= [] vp = [] h = 0.05 q = 1.0 p =", "h * q q = q + h * p vp.append(p) vq.append(q) plt.plot(vq,", "q = 1.0 p = 0.0 for i in range(1000): p = p", "i in range(1000): p = p - h * q q = q", "p = p - h * q q = q + h *", "0.0 for i in range(1000): p = p - h * q q", "[] vp = [] h = 0.05 q = 1.0 p = 0.0", "matplotlib.pyplot as plt vq = [] vp = [] h = 0.05 q", "= [] h = 0.05 q = 1.0 p = 0.0 for i", "0.05 q = 1.0 p = 0.0 for i in range(1000): p =", "vp = [] h = 0.05 q = 1.0 p = 0.0 for", "range(1000): p = p - h * q q = q + h", "import matplotlib.pyplot as plt vq = [] vp = [] h = 0.05", "- h * q q = q + h * p vp.append(p) vq.append(q)", "h = 0.05 q = 1.0 p = 0.0 for i in range(1000):", "= 0.05 q = 1.0 p = 0.0 for i in range(1000): p", "* q q = q + h * p vp.append(p) vq.append(q) plt.plot(vq, vp)", "p - h * q q = q + h * p vp.append(p)", "vq = [] vp = [] h = 0.05 q = 1.0 p", "= p - h * q q = q + h * p", "plt vq = [] vp = [] h = 0.05 q = 1.0", "<gh_stars>1-10 import matplotlib.pyplot as plt vq = [] vp = [] h =", "[] h = 0.05 q = 1.0 p = 0.0 for i in", "p = 0.0 for i in range(1000): p = p - h *" ]
[ "distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "window[wlen] * data[batch, col * hop_length + wlen] * tir.cos(2 * pi *", "Fourier transform of short overlapping windows of the input. This gives frequency components", "window = [4, 3, 2] [n_fft, hop_length, win_length, normalized, onesided] = [3, 3,", "max_threads) def stft( data, n_fft, hop_length, win_length, window, normalized, onesided, output_shape, ): \"\"\"", "tid = bx * max_threads + tx with ib.if_scope(tid < output_size): matrix_size =", "License for the # specific language governing permissions and limitations # under the", "signal as they change over time. Parameters ---------- data : relay.Expr Either a", "[34.0000, 0.0000]], [[ 4.5000, 0.8660], [ 1.0000, -1.7321]]] \"\"\" def gen_ir( data_ptr, n_fft,", "tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row, col, 1] /= tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get() output_buf =", "row, col, 0] /= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row, col, 1] /= tir.sqrt(tir.const(n_fft, \"float32\"))", "(ASF) under one # or more contributor license agreements. 
See the NOTICE file", "True] relay.stft(data, n_fft, hop_length, win_length, window, normalized, onesided) -> [[[15.0000, 0.0000], [34.0000, 0.0000]],", "row, col, 0] = tir.Cast(data_ptr.dtype, 0) output[batch, row, col, 1] = tir.Cast(data_ptr.dtype, 0)", "( window[wlen] * data[batch, col * hop_length + wlen] * tir.sin(2 * pi", "onesided, output_ptr, ): ib = tir.ir_builder.create() data = ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr) output", "1] -= ( window[wlen] * data[batch, col * hop_length + wlen] * tir.sin(2", "row, col, 1] /= tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get() output_buf = tir.decl_buffer(output_shape, data.dtype, \"output_buf\")", "* output_ptr.shape[1] * output_ptr.shape[2] with ib.new_scope(): nthread_tx = max_threads nthread_bx = ceil_div(output_size, max_threads)", "software distributed under the License is distributed on an # \"AS IS\" BASIS,", "5, 6] window = [4, 3, 2] [n_fft, hop_length, win_length, normalized, onesided] =", "* pi * row * wlen / win_length) ) with ib.if_scope(normalized): output[batch, row,", "int The distance between neighboring sliding window frames win_length : int The size", "col, 0] = tir.Cast(data_ptr.dtype, 0) output[batch, row, col, 1] = tir.Cast(data_ptr.dtype, 0) with", "over time. Parameters ---------- data : relay.Expr Either a 1-D tensor or a", "of Fourier transform hop_length : int The distance between neighboring sliding window frames", "size of window frame and STFT filter window : relay.Expr A 1-D tensor", "= [4, 3, 2] [n_fft, hop_length, win_length, normalized, onesided] = [3, 3, 3,", "language governing permissions and limitations # under the License. 
# pylint: disable=invalid-name, too-many-arguments,", "= ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size = output_ptr.shape[0]", "normalized, onesided, output_ptr, ): ib = tir.ir_builder.create() data = ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr)", "): ib = tir.ir_builder.create() data = ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr)", "under the License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES", "additional information # regarding copyright ownership. The ASF licenses this file # to", "# \"License\"); you may not use this file except in compliance # with", "ins, outs: gen_ir( ins[0], n_fft, hop_length, win_length, ins[1], normalized, onesided, outs[0] ), dtype=[data.dtype],", "Licensed to the Apache Software Foundation (ASF) under one # or more contributor", "win_length) as wlen: output[batch, row, col, 0] += ( window[wlen] * data[batch, col", "tir.ir_builder.create() data = ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0]", "fill with conjugate symmetry Returns ------- output : relay.Expr Tensor containing the STFT", "or more contributor license agreements. See the NOTICE file # distributed with this", "* output_ptr.shape[1]) output_size = output_ptr.shape[0] * output_ptr.shape[1] * output_ptr.shape[2] with ib.new_scope(): nthread_tx =", "OR CONDITIONS OF ANY # KIND, either express or implied. See the License", "Foundation (ASF) under one # or more contributor license agreements. See the NOTICE", "hop_length + wlen] * tir.cos(2 * pi * row * wlen / win_length)", "lambda ins, outs: gen_ir( ins[0], n_fft, hop_length, win_length, ins[1], normalized, onesided, outs[0] ),", "Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See", "transform hop_length : int The distance between neighboring sliding window frames win_length :", "[3, 3, 3, False, True] relay.stft(data, n_fft, hop_length, win_length, window, normalized, onesided) ->", "stft( data, n_fft, hop_length, win_length, window, normalized, onesided, output_shape, ): \"\"\" The STFT", "\"\"\" def gen_ir( data_ptr, n_fft, hop_length, win_length, window_ptr, normalized, onesided, output_ptr, ): ib", "the normalized STFT results onesided : bool Whether to return onesided result or", "window = ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size =", "in compliance # with the License. You may obtain a copy of the", "implied. See the License for the # specific language governing permissions and limitations", "return onesided result or fill with conjugate symmetry Returns ------- output : relay.Expr", "or agreed to in writing, # software distributed under the License is distributed", "max_threads = _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size = output_ptr.shape[0] * output_ptr.shape[1] * output_ptr.shape[2] with", "0.8660], [ 1.0000, -1.7321]]] \"\"\" def gen_ir( data_ptr, n_fft, hop_length, win_length, window_ptr, normalized,", "win_length : int The size of window frame and STFT filter window :", "\"output_buf\") return te.extern( output_shape, [data, window], lambda ins, outs: gen_ir( ins[0], n_fft, hop_length,", "[n_fft, hop_length, win_length, normalized, onesided] = [3, 3, 3, False, True] relay.stft(data, n_fft,", "max_threads) tx = te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx)", "license agreements. 
See the NOTICE file # distributed with this work for additional", "6] window = [4, 3, 2] [n_fft, hop_length, win_length, normalized, onesided] = [3,", "+ wlen] * tir.sin(2 * pi * row * wlen / win_length) )", "\"License\"); you may not use this file except in compliance # with the", "output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch, row, col, 0] = tir.Cast(data_ptr.dtype, 0)", "too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\" from math import pi import tvm from tvm", "tir.cos(2 * pi * row * wlen / win_length) ) output[batch, row, col,", "onesided) -> [[[15.0000, 0.0000], [34.0000, 0.0000]], [[ 4.5000, 0.8660], [ 1.0000, -1.7321]]] \"\"\"", "governing permissions and limitations # under the License. # pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks,", "( window[wlen] * data[batch, col * hop_length + wlen] * tir.cos(2 * pi", "window : relay.Expr A 1-D tensor window frame normalized : bool Whether to", "from tvm import te, tir from ..utils import ceil_div def _get_max_threads(batch_row): max_threads =", "= output_ptr.shape[1] * output_ptr.shape[2] batch = tir.floordiv(tid, matrix_size) row = tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2])", "either express or implied. See the License for the # specific language governing", "win_length) ) with ib.if_scope(normalized): output[batch, row, col, 0] /= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row,", "output_ptr.shape[2] batch = tir.floordiv(tid, matrix_size) row = tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid,", "the License. 
# pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\" from math import", "results onesided : bool Whether to return onesided result or fill with conjugate", "2, 3, 4, 5, 6] window = [4, 3, 2] [n_fft, hop_length, win_length,", "not use this file except in compliance # with the License. You may", "too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\" from math import pi import tvm from tvm import", "tir from ..utils import ceil_div def _get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads)", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "# or more contributor license agreements. See the NOTICE file # distributed with", "window, normalized, onesided) -> [[[15.0000, 0.0000], [34.0000, 0.0000]], [[ 4.5000, 0.8660], [ 1.0000,", "= tir.floordiv(tid, matrix_size) row = tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2])", "a 2-D batch tensor. n_fft : int The size of Fourier transform hop_length", "output_ptr.shape[1] * output_ptr.shape[2] with ib.new_scope(): nthread_tx = max_threads nthread_bx = ceil_div(output_size, max_threads) tx", "te.extern( output_shape, [data, window], lambda ins, outs: gen_ir( ins[0], n_fft, hop_length, win_length, ins[1],", "1-D tensor window frame normalized : bool Whether to return the normalized STFT", "tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads) def stft( data, n_fft, hop_length, win_length, window, normalized, onesided,", "= [3, 3, 3, False, True] relay.stft(data, n_fft, hop_length, win_length, window, normalized, onesided)", "WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. 
See the", "hop_length, win_length, window, normalized, onesided, output_shape, ): \"\"\" The STFT computes the Fourier", "limitations # under the License. # pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\"", "nthread_bx) tid = bx * max_threads + tx with ib.if_scope(tid < output_size): matrix_size", "[[[15.0000, 0.0000], [34.0000, 0.0000]], [[ 4.5000, 0.8660], [ 1.0000, -1.7321]]] \"\"\" def gen_ir(", "2] [n_fft, hop_length, win_length, normalized, onesided] = [3, 3, 3, False, True] relay.stft(data,", "output[batch, row, col, 0] += ( window[wlen] * data[batch, col * hop_length +", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "output[batch, row, col, 1] /= tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get() output_buf = tir.decl_buffer(output_shape, data.dtype,", "pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\" from math import pi import tvm", "time. Parameters ---------- data : relay.Expr Either a 1-D tensor or a 2-D", "te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid = bx * max_threads +", "matrix_size = output_ptr.shape[1] * output_ptr.shape[2] batch = tir.floordiv(tid, matrix_size) row = tir.floordiv(tir.indexmod(tid, matrix_size),", "The size of window frame and STFT filter window : relay.Expr A 1-D", "with ib.new_scope(): nthread_tx = max_threads nthread_bx = ceil_div(output_size, max_threads) tx = te.thread_axis(\"threadIdx.x\") bx", "tensor. n_fft : int The size of Fourier transform hop_length : int The", "relay.Expr A 1-D tensor window frame normalized : bool Whether to return the", "0) with ib.for_range(0, win_length) as wlen: output[batch, row, col, 0] += ( window[wlen]", "# regarding copyright ownership. 
The ASF licenses this file # to you under", "tx = te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid", "more contributor license agreements. See the NOTICE file # distributed with this work", ": int The size of window frame and STFT filter window : relay.Expr", "col * hop_length + wlen] * tir.cos(2 * pi * row * wlen", "output_ptr.shape[2] with ib.new_scope(): nthread_tx = max_threads nthread_bx = ceil_div(output_size, max_threads) tx = te.thread_axis(\"threadIdx.x\")", "is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "of the input. This gives frequency components of the signal as they change", "1] = tir.Cast(data_ptr.dtype, 0) with ib.for_range(0, win_length) as wlen: output[batch, row, col, 0]", "* pi * row * wlen / win_length) ) output[batch, row, col, 1]", "+= ( window[wlen] * data[batch, col * hop_length + wlen] * tir.cos(2 *", "return te.extern( output_shape, [data, window], lambda ins, outs: gen_ir( ins[0], n_fft, hop_length, win_length,", "matrix_size), output_ptr.shape[2]) output[batch, row, col, 0] = tir.Cast(data_ptr.dtype, 0) output[batch, row, col, 1]", "CONDITIONS OF ANY # KIND, either express or implied. See the License for", "[1, 2, 3, 4, 5, 6] window = [4, 3, 2] [n_fft, hop_length,", "Parameters ---------- data : relay.Expr Either a 1-D tensor or a 2-D batch", "work for additional information # regarding copyright ownership. The ASF licenses this file", "output_ptr.shape[2]) output[batch, row, col, 0] = tir.Cast(data_ptr.dtype, 0) output[batch, row, col, 1] =", "window], lambda ins, outs: gen_ir( ins[0], n_fft, hop_length, win_length, ins[1], normalized, onesided, outs[0]", "tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch, row, col, 0] = tir.Cast(data_ptr.dtype, 0) output[batch, row, col,", "data : relay.Expr Either a 1-D tensor or a 2-D batch tensor. 
n_fft", "The STFT computes the Fourier transform of short overlapping windows of the input.", "wlen / win_length) ) with ib.if_scope(normalized): output[batch, row, col, 0] /= tir.sqrt(tir.const(n_fft, \"float32\"))", "normalized : bool Whether to return the normalized STFT results onesided : bool", "licenses this file # to you under the Apache License, Version 2.0 (the", "STFT computes the Fourier transform of short overlapping windows of the input. This", "# \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either", ": int The size of Fourier transform hop_length : int The distance between", "express or implied. See the License for the # specific language governing permissions", "symmetry Returns ------- output : relay.Expr Tensor containing the STFT result Examples --------", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "= te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid = bx * max_threads", "tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch, row, col, 0] =", "window frame and STFT filter window : relay.Expr A 1-D tensor window frame", "_get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads) def stft( data, n_fft, hop_length, win_length,", "you under the Apache License, Version 2.0 (the # \"License\"); you may not", "data = [1, 2, 3, 4, 5, 6] window = [4, 3, 2]", "License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "window[wlen] * data[batch, col * hop_length + wlen] * tir.sin(2 * pi *", "row = tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch, row, col,", "win_length) ) output[batch, row, col, 1] -= ( 
window[wlen] * data[batch, col *", "/= tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get() output_buf = tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return te.extern( output_shape,", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "normalized, onesided] = [3, 3, 3, False, True] relay.stft(data, n_fft, hop_length, win_length, window,", "under the Apache License, Version 2.0 (the # \"License\"); you may not use", "\"float32\")) output[batch, row, col, 1] /= tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get() output_buf = tir.decl_buffer(output_shape,", "tir.floordiv(tid, matrix_size) row = tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch,", "tir.Cast(data_ptr.dtype, 0) output[batch, row, col, 1] = tir.Cast(data_ptr.dtype, 0) with ib.for_range(0, win_length) as", "python data = [1, 2, 3, 4, 5, 6] window = [4, 3,", "License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "or implied. 
See the License for the # specific language governing permissions and", "): \"\"\" The STFT computes the Fourier transform of short overlapping windows of", "relay.stft(data, n_fft, hop_length, win_length, window, normalized, onesided) -> [[[15.0000, 0.0000], [34.0000, 0.0000]], [[", "max_threads nthread_bx = ceil_div(output_size, max_threads) tx = te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\",", "distributed under the License is distributed on an # \"AS IS\" BASIS, WITHOUT", "matrix_size), output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch, row, col, 0] = tir.Cast(data_ptr.dtype,", "tir.Cast(data_ptr.dtype, 0) with ib.for_range(0, win_length) as wlen: output[batch, row, col, 0] += (", "col, 0] += ( window[wlen] * data[batch, col * hop_length + wlen] *", "ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid = bx * max_threads + tx", "= tir.Cast(data_ptr.dtype, 0) output[batch, row, col, 1] = tir.Cast(data_ptr.dtype, 0) with ib.for_range(0, win_length)", "import te, tir from ..utils import ceil_div def _get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return", "return tir.min(batch_row, max_threads) def stft( data, n_fft, hop_length, win_length, window, normalized, onesided, output_shape,", "Whether to return onesided result or fill with conjugate symmetry Returns ------- output", "output_ptr, ): ib = tir.ir_builder.create() data = ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr) output =", "or a 2-D batch tensor. 
n_fft : int The size of Fourier transform", "col, 0] /= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row, col, 1] /= tir.sqrt(tir.const(n_fft, \"float32\")) return", "frame and STFT filter window : relay.Expr A 1-D tensor window frame normalized", "return the normalized STFT results onesided : bool Whether to return onesided result", "-= ( window[wlen] * data[batch, col * hop_length + wlen] * tir.sin(2 *", "input. This gives frequency components of the signal as they change over time.", "of short overlapping windows of the input. This gives frequency components of the", "License. # pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\" from math import pi", "* hop_length + wlen] * tir.cos(2 * pi * row * wlen /", "with ib.if_scope(normalized): output[batch, row, col, 0] /= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row, col, 1]", "Unless required by applicable law or agreed to in writing, # software distributed", "frame normalized : bool Whether to return the normalized STFT results onesided :", "distributed with this work for additional information # regarding copyright ownership. The ASF", "+ wlen] * tir.cos(2 * pi * row * wlen / win_length) )", "computes the Fourier transform of short overlapping windows of the input. This gives", "regarding copyright ownership. The ASF licenses this file # to you under the", "and limitations # under the License. # pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT", "tensor or a 2-D batch tensor. n_fft : int The size of Fourier", "# KIND, either express or implied. See the License for the # specific", "this work for additional information # regarding copyright ownership. The ASF licenses this", "col = tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch, row, col, 0] = tir.Cast(data_ptr.dtype, 0) output[batch,", "ANY # KIND, either express or implied. 
See the License for the #", "contributor license agreements. See the NOTICE file # distributed with this work for", "[4, 3, 2] [n_fft, hop_length, win_length, normalized, onesided] = [3, 3, 3, False,", "as they change over time. Parameters ---------- data : relay.Expr Either a 1-D", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "= te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid =", "\"\"\"STFT operator\"\"\" from math import pi import tvm from tvm import te, tir", "overlapping windows of the input. This gives frequency components of the signal as", "change over time. Parameters ---------- data : relay.Expr Either a 1-D tensor or", "transform of short overlapping windows of the input. This gives frequency components of", "= _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size = output_ptr.shape[0] * output_ptr.shape[1] * output_ptr.shape[2] with ib.new_scope():", "operator\"\"\" from math import pi import tvm from tvm import te, tir from", "-1.7321]]] \"\"\" def gen_ir( data_ptr, n_fft, hop_length, win_length, window_ptr, normalized, onesided, output_ptr, ):", "< output_size): matrix_size = output_ptr.shape[1] * output_ptr.shape[2] batch = tir.floordiv(tid, matrix_size) row =", "See the License for the # specific language governing permissions and limitations #", "containing the STFT result Examples -------- .. code-block:: python data = [1, 2,", "distance between neighboring sliding window frames win_length : int The size of window", "window frames win_length : int The size of window frame and STFT filter", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or", "2.0 (the # \"License\"); you may not use this file except in compliance", "specific language governing permissions and limitations # under the License. 
# pylint: disable=invalid-name,", "normalized STFT results onesided : bool Whether to return onesided result or fill", "Examples -------- .. code-block:: python data = [1, 2, 3, 4, 5, 6]", "n_fft, hop_length, win_length, window, normalized, onesided, output_shape, ): \"\"\" The STFT computes the", "0] /= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row, col, 1] /= tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get()", "to return the normalized STFT results onesided : bool Whether to return onesided", "/ win_length) ) with ib.if_scope(normalized): output[batch, row, col, 0] /= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch,", ": relay.Expr Either a 1-D tensor or a 2-D batch tensor. n_fft :", "KIND, either express or implied. See the License for the # specific language", "# pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\" from math import pi import", "/= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row, col, 1] /= tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get() output_buf", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "matrix_size) row = tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch, row,", "------- output : relay.Expr Tensor containing the STFT result Examples -------- .. 
code-block::", "n_fft, hop_length, win_length, ins[1], normalized, onesided, outs[0] ), dtype=[data.dtype], out_buffers=[output_buf], name=\"stft_cuda\", tag=\"stft_cuda\", )", "gen_ir( ins[0], n_fft, hop_length, win_length, ins[1], normalized, onesided, outs[0] ), dtype=[data.dtype], out_buffers=[output_buf], name=\"stft_cuda\",", "+ tx with ib.if_scope(tid < output_size): matrix_size = output_ptr.shape[1] * output_ptr.shape[2] batch =", "= tir.ir_builder.create() data = ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr) max_threads =", "compliance # with the License. You may obtain a copy of the License", "hop_length, win_length, normalized, onesided] = [3, 3, 3, False, True] relay.stft(data, n_fft, hop_length,", "bool Whether to return the normalized STFT results onesided : bool Whether to", "WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See", "with the License. You may obtain a copy of the License at #", "information # regarding copyright ownership. The ASF licenses this file # to you", "0] = tir.Cast(data_ptr.dtype, 0) output[batch, row, col, 1] = tir.Cast(data_ptr.dtype, 0) with ib.for_range(0,", "window frame normalized : bool Whether to return the normalized STFT results onesided", "permissions and limitations # under the License. # pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument", "\"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid = bx * max_threads + tx with", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "* wlen / win_length) ) output[batch, row, col, 1] -= ( window[wlen] *", "result or fill with conjugate symmetry Returns ------- output : relay.Expr Tensor containing", "one # or more contributor license agreements. See the NOTICE file # distributed", "pi import tvm from tvm import te, tir from ..utils import ceil_div def", "except in compliance # with the License. 
You may obtain a copy of", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "you may not use this file except in compliance # with the License.", "an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND,", "ib.if_scope(normalized): output[batch, row, col, 0] /= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row, col, 1] /=", "* tir.sin(2 * pi * row * wlen / win_length) ) with ib.if_scope(normalized):", "data.dtype, \"output_buf\") return te.extern( output_shape, [data, window], lambda ins, outs: gen_ir( ins[0], n_fft,", "as wlen: output[batch, row, col, 0] += ( window[wlen] * data[batch, col *", "and STFT filter window : relay.Expr A 1-D tensor window frame normalized :", "row, col, 0] += ( window[wlen] * data[batch, col * hop_length + wlen]", "* output_ptr.shape[2] batch = tir.floordiv(tid, matrix_size) row = tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col =", "wlen] * tir.cos(2 * pi * row * wlen / win_length) ) output[batch,", "output[batch, row, col, 1] = tir.Cast(data_ptr.dtype, 0) with ib.for_range(0, win_length) as wlen: output[batch,", "col * hop_length + wlen] * tir.sin(2 * pi * row * wlen", "def stft( data, n_fft, hop_length, win_length, window, normalized, onesided, output_shape, ): \"\"\" The", "this file # to you under the Apache License, Version 2.0 (the #", "ib.for_range(0, win_length) as wlen: output[batch, row, col, 0] += ( window[wlen] * data[batch,", "= tir.Cast(data_ptr.dtype, 0) with ib.for_range(0, win_length) as wlen: output[batch, row, col, 0] +=", "/ win_length) ) output[batch, row, col, 1] -= ( window[wlen] * data[batch, col", "# # Unless required by applicable law or agreed to in writing, #", "STFT filter window : relay.Expr A 1-D tensor window frame normalized : bool", "Version 2.0 (the # \"License\"); you may not use this file except in", "for the # specific language governing permissions and limitations # under the License.", ": bool 
Whether to return the normalized STFT results onesided : bool Whether", "n_fft : int The size of Fourier transform hop_length : int The distance", "they change over time. Parameters ---------- data : relay.Expr Either a 1-D tensor", "output[batch, row, col, 0] = tir.Cast(data_ptr.dtype, 0) output[batch, row, col, 1] = tir.Cast(data_ptr.dtype,", "output_size): matrix_size = output_ptr.shape[1] * output_ptr.shape[2] batch = tir.floordiv(tid, matrix_size) row = tir.floordiv(tir.indexmod(tid,", "or fill with conjugate symmetry Returns ------- output : relay.Expr Tensor containing the", "OF ANY # KIND, either express or implied. See the License for the", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied.", "ceil_div def _get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads) def stft( data, n_fft,", ": relay.Expr A 1-D tensor window frame normalized : bool Whether to return", "= [1, 2, 3, 4, 5, 6] window = [4, 3, 2] [n_fft,", "from ..utils import ceil_div def _get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads) def", "-------- .. code-block:: python data = [1, 2, 3, 4, 5, 6] window", "# specific language governing permissions and limitations # under the License. # pylint:", "tx with ib.if_scope(tid < output_size): matrix_size = output_ptr.shape[1] * output_ptr.shape[2] batch = tir.floordiv(tid,", "License, Version 2.0 (the # \"License\"); you may not use this file except", "batch = tir.floordiv(tid, matrix_size) row = tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid, matrix_size),", "relay.Expr Tensor containing the STFT result Examples -------- .. code-block:: python data =", "This gives frequency components of the signal as they change over time. Parameters", "this file except in compliance # with the License. 
You may obtain a", "win_length, window_ptr, normalized, onesided, output_ptr, ): ib = tir.ir_builder.create() data = ib.buffer_ptr(data_ptr) window", "may not use this file except in compliance # with the License. You", "3, 4, 5, 6] window = [4, 3, 2] [n_fft, hop_length, win_length, normalized,", "nthread_tx = max_threads nthread_bx = ceil_div(output_size, max_threads) tx = te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\")", "ASF licenses this file # to you under the Apache License, Version 2.0", "The distance between neighboring sliding window frames win_length : int The size of", "the input. This gives frequency components of the signal as they change over", "= tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col = tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch, row, col, 0]", "import ceil_div def _get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads) def stft( data,", "hop_length + wlen] * tir.sin(2 * pi * row * wlen / win_length)", "between neighboring sliding window frames win_length : int The size of window frame", "0.0000], [34.0000, 0.0000]], [[ 4.5000, 0.8660], [ 1.0000, -1.7321]]] \"\"\" def gen_ir( data_ptr,", "* output_ptr.shape[2] with ib.new_scope(): nthread_tx = max_threads nthread_bx = ceil_div(output_size, max_threads) tx =", "# distributed with this work for additional information # regarding copyright ownership. 
The", ") with ib.if_scope(normalized): output[batch, row, col, 0] /= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row, col,", "---------- data : relay.Expr Either a 1-D tensor or a 2-D batch tensor.", "* max_threads + tx with ib.if_scope(tid < output_size): matrix_size = output_ptr.shape[1] * output_ptr.shape[2]", "from math import pi import tvm from tvm import te, tir from ..utils", "The size of Fourier transform hop_length : int The distance between neighboring sliding", "on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY #", "= tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads) def stft( data, n_fft, hop_length, win_length, window, normalized,", "output_ptr.shape[1] * output_ptr.shape[2] batch = tir.floordiv(tid, matrix_size) row = tir.floordiv(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) col", "with this work for additional information # regarding copyright ownership. The ASF licenses", "pi * row * wlen / win_length) ) output[batch, row, col, 1] -=", "max_threads + tx with ib.if_scope(tid < output_size): matrix_size = output_ptr.shape[1] * output_ptr.shape[2] batch", "the License. You may obtain a copy of the License at # #", "relay.Expr Either a 1-D tensor or a 2-D batch tensor. n_fft : int", "ib.new_scope(): nthread_tx = max_threads nthread_bx = ceil_div(output_size, max_threads) tx = te.thread_axis(\"threadIdx.x\") bx =", "data, n_fft, hop_length, win_length, window, normalized, onesided, output_shape, ): \"\"\" The STFT computes", "..utils import ceil_div def _get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads) def stft(", "agreements. 
See the NOTICE file # distributed with this work for additional information", "row * wlen / win_length) ) with ib.if_scope(normalized): output[batch, row, col, 0] /=", "output_buf = tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return te.extern( output_shape, [data, window], lambda ins, outs:", "= output_ptr.shape[0] * output_ptr.shape[1] * output_ptr.shape[2] with ib.new_scope(): nthread_tx = max_threads nthread_bx =", "writing, # software distributed under the License is distributed on an # \"AS", "max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads) def stft( data, n_fft, hop_length, win_length, window,", "with conjugate symmetry Returns ------- output : relay.Expr Tensor containing the STFT result", "output_size = output_ptr.shape[0] * output_ptr.shape[1] * output_ptr.shape[2] with ib.new_scope(): nthread_tx = max_threads nthread_bx", "components of the signal as they change over time. Parameters ---------- data :", "hop_length : int The distance between neighboring sliding window frames win_length : int", "bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid = bx *", "conjugate symmetry Returns ------- output : relay.Expr Tensor containing the STFT result Examples", "tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get() output_buf = tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return te.extern( output_shape, [data,", "[ 1.0000, -1.7321]]] \"\"\" def gen_ir( data_ptr, n_fft, hop_length, win_length, window_ptr, normalized, onesided,", "ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size = output_ptr.shape[0] * output_ptr.shape[1] * output_ptr.shape[2]", "NOTICE file # distributed with this work for additional information # regarding copyright", "import tvm from tvm import te, tir from ..utils import ceil_div def 
_get_max_threads(batch_row):", "col, 1] -= ( window[wlen] * data[batch, col * hop_length + wlen] *", "data[batch, col * hop_length + wlen] * tir.cos(2 * pi * row *", "te, tir from ..utils import ceil_div def _get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row,", "* data[batch, col * hop_length + wlen] * tir.sin(2 * pi * row", "3, False, True] relay.stft(data, n_fft, hop_length, win_length, window, normalized, onesided) -> [[[15.0000, 0.0000],", "row, col, 1] -= ( window[wlen] * data[batch, col * hop_length + wlen]", "frames win_length : int The size of window frame and STFT filter window", "\"\"\" The STFT computes the Fourier transform of short overlapping windows of the", "the Apache License, Version 2.0 (the # \"License\"); you may not use this", "gen_ir( data_ptr, n_fft, hop_length, win_length, window_ptr, normalized, onesided, output_ptr, ): ib = tir.ir_builder.create()", "tir.sin(2 * pi * row * wlen / win_length) ) with ib.if_scope(normalized): output[batch,", "hop_length, win_length, window_ptr, normalized, onesided, output_ptr, ): ib = tir.ir_builder.create() data = ib.buffer_ptr(data_ptr)", "data = ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0] *", "Tensor containing the STFT result Examples -------- .. code-block:: python data = [1,", "return ib.get() output_buf = tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return te.extern( output_shape, [data, window], lambda", "int The size of Fourier transform hop_length : int The distance between neighboring", "the STFT result Examples -------- .. 
code-block:: python data = [1, 2, 3,", "code-block:: python data = [1, 2, 3, 4, 5, 6] window = [4,", "ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size", "with ib.for_range(0, win_length) as wlen: output[batch, row, col, 0] += ( window[wlen] *", "tvm import te, tir from ..utils import ceil_div def _get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads", "The ASF licenses this file # to you under the Apache License, Version", "file except in compliance # with the License. You may obtain a copy", "output_shape, [data, window], lambda ins, outs: gen_ir( ins[0], n_fft, hop_length, win_length, ins[1], normalized,", "frequency components of the signal as they change over time. Parameters ---------- data", "= max_threads nthread_bx = ceil_div(output_size, max_threads) tx = te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx,", "file # to you under the Apache License, Version 2.0 (the # \"License\");", ": relay.Expr Tensor containing the STFT result Examples -------- .. 
code-block:: python data", "Returns ------- output : relay.Expr Tensor containing the STFT result Examples -------- ..", "win_length, normalized, onesided] = [3, 3, 3, False, True] relay.stft(data, n_fft, hop_length, win_length,", "4.5000, 0.8660], [ 1.0000, -1.7321]]] \"\"\" def gen_ir( data_ptr, n_fft, hop_length, win_length, window_ptr,", "tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return te.extern( output_shape, [data, window], lambda ins, outs: gen_ir( ins[0],", "disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\" from math import pi import tvm from", "* hop_length + wlen] * tir.sin(2 * pi * row * wlen /", "nthread_bx = ceil_div(output_size, max_threads) tx = te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx)", "output_ptr.shape[0] * output_ptr.shape[1] * output_ptr.shape[2] with ib.new_scope(): nthread_tx = max_threads nthread_bx = ceil_div(output_size,", "row, col, 1] = tir.Cast(data_ptr.dtype, 0) with ib.for_range(0, win_length) as wlen: output[batch, row,", "(the # \"License\"); you may not use this file except in compliance #", "= ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size = output_ptr.shape[0] * output_ptr.shape[1] *", "= tir.indexmod(tir.indexmod(tid, matrix_size), output_ptr.shape[2]) output[batch, row, col, 0] = tir.Cast(data_ptr.dtype, 0) output[batch, row,", "window_ptr, normalized, onesided, output_ptr, ): ib = tir.ir_builder.create() data = ib.buffer_ptr(data_ptr) window =", "int The size of window frame and STFT filter window : relay.Expr A", "onesided] = [3, 3, 3, False, True] relay.stft(data, n_fft, hop_length, win_length, window, normalized,", "= bx * max_threads + tx with ib.if_scope(tid < output_size): matrix_size = output_ptr.shape[1]", "tir.min(batch_row, max_threads) def stft( data, n_fft, hop_length, win_length, window, normalized, 
onesided, output_shape, ):", "onesided : bool Whether to return onesided result or fill with conjugate symmetry", "of window frame and STFT filter window : relay.Expr A 1-D tensor window", "wlen] * tir.sin(2 * pi * row * wlen / win_length) ) with", "output_shape, ): \"\"\" The STFT computes the Fourier transform of short overlapping windows", "filter window : relay.Expr A 1-D tensor window frame normalized : bool Whether", "law or agreed to in writing, # software distributed under the License is", "sliding window frames win_length : int The size of window frame and STFT", "# software distributed under the License is distributed on an # \"AS IS\"", "to you under the Apache License, Version 2.0 (the # \"License\"); you may", "ib.get() output_buf = tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return te.extern( output_shape, [data, window], lambda ins,", "file # distributed with this work for additional information # regarding copyright ownership.", "ib.if_scope(tid < output_size): matrix_size = output_ptr.shape[1] * output_ptr.shape[2] batch = tir.floordiv(tid, matrix_size) row", "* wlen / win_length) ) with ib.if_scope(normalized): output[batch, row, col, 0] /= tir.sqrt(tir.const(n_fft,", "# Licensed to the Apache Software Foundation (ASF) under one # or more", "outs: gen_ir( ins[0], n_fft, hop_length, win_length, ins[1], normalized, onesided, outs[0] ), dtype=[data.dtype], out_buffers=[output_buf],", "to return onesided result or fill with conjugate symmetry Returns ------- output :", "import pi import tvm from tvm import te, tir from ..utils import ceil_div", "copyright ownership. The ASF licenses this file # to you under the Apache", "ownership. The ASF licenses this file # to you under the Apache License,", "def gen_ir( data_ptr, n_fft, hop_length, win_length, window_ptr, normalized, onesided, output_ptr, ): ib =", "* row * wlen / win_length) ) output[batch, row, col, 1] -= (", "short overlapping windows of the input. 
This gives frequency components of the signal", "normalized, onesided, output_shape, ): \"\"\" The STFT computes the Fourier transform of short", "math import pi import tvm from tvm import te, tir from ..utils import", "3, 2] [n_fft, hop_length, win_length, normalized, onesided] = [3, 3, 3, False, True]", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "onesided, output_shape, ): \"\"\" The STFT computes the Fourier transform of short overlapping", "bool Whether to return onesided result or fill with conjugate symmetry Returns -------", "gives frequency components of the signal as they change over time. Parameters ----------", "ib = tir.ir_builder.create() data = ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr) max_threads", ") output[batch, row, col, 1] -= ( window[wlen] * data[batch, col * hop_length", "* row * wlen / win_length) ) with ib.if_scope(normalized): output[batch, row, col, 0]", "# Unless required by applicable law or agreed to in writing, # software", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "4, 5, 6] window = [4, 3, 2] [n_fft, hop_length, win_length, normalized, onesided]", "_get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size = output_ptr.shape[0] * output_ptr.shape[1] * output_ptr.shape[2] with ib.new_scope(): nthread_tx", "0.0000]], [[ 4.5000, 0.8660], [ 1.0000, -1.7321]]] \"\"\" def gen_ir( data_ptr, n_fft, hop_length,", "to in writing, # software distributed under the License is distributed on an", "[[ 4.5000, 0.8660], [ 1.0000, -1.7321]]] \"\"\" def gen_ir( data_ptr, n_fft, hop_length, win_length,", "agreed to in writing, # software distributed under the License is distributed on", "ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size = output_ptr.shape[0] *", "batch tensor. 
n_fft : int The size of Fourier transform hop_length : int", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express", "3, 3, False, True] relay.stft(data, n_fft, hop_length, win_length, window, normalized, onesided) -> [[[15.0000,", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "to the Apache Software Foundation (ASF) under one # or more contributor license", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "win_length, window, normalized, onesided, output_shape, ): \"\"\" The STFT computes the Fourier transform", "= ib.buffer_ptr(data_ptr) window = ib.buffer_ptr(window_ptr) output = ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1])", "output_ptr.shape[1]) output_size = output_ptr.shape[0] * output_ptr.shape[1] * output_ptr.shape[2] with ib.new_scope(): nthread_tx = max_threads", "col, 1] /= tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get() output_buf = tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return", "ceil_div(output_size, max_threads) tx = te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\",", "output = ib.buffer_ptr(output_ptr) max_threads = _get_max_threads(output_ptr.shape[0] * output_ptr.shape[1]) output_size = output_ptr.shape[0] * output_ptr.shape[1]", "neighboring sliding window frames win_length : int The size of window frame and", "ins[0], n_fft, hop_length, win_length, ins[1], normalized, onesided, outs[0] ), dtype=[data.dtype], out_buffers=[output_buf], name=\"stft_cuda\", tag=\"stft_cuda\",", "output : relay.Expr Tensor containing the STFT result Examples -------- .. code-block:: python", "use this file except in compliance # with the License. 
You may obtain", "\"float32\")) return ib.get() output_buf = tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return te.extern( output_shape, [data, window],", "Software Foundation (ASF) under one # or more contributor license agreements. See the", "STFT result Examples -------- .. code-block:: python data = [1, 2, 3, 4,", "the License is distributed on an # \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "wlen / win_length) ) output[batch, row, col, 1] -= ( window[wlen] * data[batch,", "= tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return te.extern( output_shape, [data, window], lambda ins, outs: gen_ir(", "output[batch, row, col, 1] -= ( window[wlen] * data[batch, col * hop_length +", "output[batch, row, col, 0] /= tir.sqrt(tir.const(n_fft, \"float32\")) output[batch, row, col, 1] /= tir.sqrt(tir.const(n_fft,", "ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid = bx * max_threads + tx with ib.if_scope(tid <", "tensor window frame normalized : bool Whether to return the normalized STFT results", "= ceil_div(output_size, max_threads) tx = te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx,", "[data, window], lambda ins, outs: gen_ir( ins[0], n_fft, hop_length, win_length, ins[1], normalized, onesided,", "the # specific language governing permissions and limitations # under the License. #", "1-D tensor or a 2-D batch tensor. n_fft : int The size of", "size of Fourier transform hop_length : int The distance between neighboring sliding window", "See the NOTICE file # distributed with this work for additional information #", "window, normalized, onesided, output_shape, ): \"\"\" The STFT computes the Fourier transform of", "-> [[[15.0000, 0.0000], [34.0000, 0.0000]], [[ 4.5000, 0.8660], [ 1.0000, -1.7321]]] \"\"\" def", "Either a 1-D tensor or a 2-D batch tensor. 
n_fft : int The", "the NOTICE file # distributed with this work for additional information # regarding", "in writing, # software distributed under the License is distributed on an #", "the Apache Software Foundation (ASF) under one # or more contributor license agreements.", "False, True] relay.stft(data, n_fft, hop_length, win_length, window, normalized, onesided) -> [[[15.0000, 0.0000], [34.0000,", "def _get_max_threads(batch_row): max_threads = tvm.target.Target.current(allow_none=False).max_num_threads return tir.min(batch_row, max_threads) def stft( data, n_fft, hop_length,", "n_fft, hop_length, win_length, window, normalized, onesided) -> [[[15.0000, 0.0000], [34.0000, 0.0000]], [[ 4.5000,", "\"thread_extent\", nthread_bx) tid = bx * max_threads + tx with ib.if_scope(tid < output_size):", ": bool Whether to return onesided result or fill with conjugate symmetry Returns", "Fourier transform hop_length : int The distance between neighboring sliding window frames win_length", "normalized, onesided) -> [[[15.0000, 0.0000], [34.0000, 0.0000]], [[ 4.5000, 0.8660], [ 1.0000, -1.7321]]]", "n_fft, hop_length, win_length, window_ptr, normalized, onesided, output_ptr, ): ib = tir.ir_builder.create() data =", ": int The distance between neighboring sliding window frames win_length : int The", "STFT results onesided : bool Whether to return onesided result or fill with", "under the License. # pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\" from math", "te.thread_axis(\"threadIdx.x\") bx = te.thread_axis(\"blockIdx.x\") ib.scope_attr(tx, \"thread_extent\", nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid = bx", "2-D batch tensor. 
n_fft : int The size of Fourier transform hop_length :", "col, 1] = tir.Cast(data_ptr.dtype, 0) with ib.for_range(0, win_length) as wlen: output[batch, row, col,", "* data[batch, col * hop_length + wlen] * tir.cos(2 * pi * row", "the signal as they change over time. Parameters ---------- data : relay.Expr Either", "result Examples -------- .. code-block:: python data = [1, 2, 3, 4, 5,", "# with the License. You may obtain a copy of the License at", "of the signal as they change over time. Parameters ---------- data : relay.Expr", "wlen: output[batch, row, col, 0] += ( window[wlen] * data[batch, col * hop_length", "0) output[batch, row, col, 1] = tir.Cast(data_ptr.dtype, 0) with ib.for_range(0, win_length) as wlen:", "with ib.if_scope(tid < output_size): matrix_size = output_ptr.shape[1] * output_ptr.shape[2] batch = tir.floordiv(tid, matrix_size)", "data[batch, col * hop_length + wlen] * tir.sin(2 * pi * row *", "hop_length, win_length, window, normalized, onesided) -> [[[15.0000, 0.0000], [34.0000, 0.0000]], [[ 4.5000, 0.8660],", "Apache License, Version 2.0 (the # \"License\"); you may not use this file", "a 1-D tensor or a 2-D batch tensor. n_fft : int The size", "onesided result or fill with conjugate symmetry Returns ------- output : relay.Expr Tensor", "<gh_stars>1000+ # Licensed to the Apache Software Foundation (ASF) under one # or", "pi * row * wlen / win_length) ) with ib.if_scope(normalized): output[batch, row, col,", "under one # or more contributor license agreements. See the NOTICE file #", "# to you under the Apache License, Version 2.0 (the # \"License\"); you", "required by applicable law or agreed to in writing, # software distributed under", "the Fourier transform of short overlapping windows of the input. This gives frequency", "1.0000, -1.7321]]] \"\"\" def gen_ir( data_ptr, n_fft, hop_length, win_length, window_ptr, normalized, onesided, output_ptr,", ".. 
code-block:: python data = [1, 2, 3, 4, 5, 6] window =", "tvm from tvm import te, tir from ..utils import ceil_div def _get_max_threads(batch_row): max_threads", "by applicable law or agreed to in writing, # software distributed under the", "for additional information # regarding copyright ownership. The ASF licenses this file #", "0] += ( window[wlen] * data[batch, col * hop_length + wlen] * tir.cos(2", "Whether to return the normalized STFT results onesided : bool Whether to return", "1] /= tir.sqrt(tir.const(n_fft, \"float32\")) return ib.get() output_buf = tir.decl_buffer(output_shape, data.dtype, \"output_buf\") return te.extern(", "the License for the # specific language governing permissions and limitations # under", "applicable law or agreed to in writing, # software distributed under the License", "data_ptr, n_fft, hop_length, win_length, window_ptr, normalized, onesided, output_ptr, ): ib = tir.ir_builder.create() data", "row * wlen / win_length) ) output[batch, row, col, 1] -= ( window[wlen]", "* tir.cos(2 * pi * row * wlen / win_length) ) output[batch, row,", "# under the License. # pylint: disable=invalid-name, too-many-arguments, too-many-nested-blocks, unused-argument \"\"\"STFT operator\"\"\" from", "A 1-D tensor window frame normalized : bool Whether to return the normalized", "nthread_tx) ib.scope_attr(bx, \"thread_extent\", nthread_bx) tid = bx * max_threads + tx with ib.if_scope(tid", "win_length, window, normalized, onesided) -> [[[15.0000, 0.0000], [34.0000, 0.0000]], [[ 4.5000, 0.8660], [", "windows of the input. This gives frequency components of the signal as they", "bx * max_threads + tx with ib.if_scope(tid < output_size): matrix_size = output_ptr.shape[1] *", "unused-argument \"\"\"STFT operator\"\"\" from math import pi import tvm from tvm import te," ]
[ "from spyder import Spyder from Kite import config import tushare as ts ts.set_token(config.TUSHARE_TOKEN)", "self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): stock_info_a_code_name_df = ak.stock_info_a_code_name() for _id in range(stock_info_a_code_name_df.shape[0]): _dict =", "__init__ from spyder import Spyder from Kite import config import tushare as ts", "in range(stock_info_a_code_name_df.shape[0]): _dict = stock_info_a_code_name_df.iloc[_id].to_dict() self.col.insert_one(_dict) def get_historical_news(self): pass if __name__ == \"__main__\":", "class StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder, self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO)", "stock_info_a_code_name_df = ak.stock_info_a_code_name() for _id in range(stock_info_a_code_name_df.shape[0]): _dict = stock_info_a_code_name_df.iloc[_id].to_dict() self.col.insert_one(_dict) def get_historical_news(self):", "self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): stock_info_a_code_name_df = ak.stock_info_a_code_name() for _id in range(stock_info_a_code_name_df.shape[0]):", "ak class StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder, self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db,", "tushare as ts ts.set_token(config.TUSHARE_TOKEN) import akshare as ak class StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder,", "StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder, self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def", "config import tushare as ts 
ts.set_token(config.TUSHARE_TOKEN) import akshare as ak class StockInfoSpyder(Spyder): def", "import tushare as ts ts.set_token(config.TUSHARE_TOKEN) import akshare as ak class StockInfoSpyder(Spyder): def __init__(self):", "__init__(self): super(StockInfoSpyder, self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): stock_info_a_code_name_df", "ts.set_token(config.TUSHARE_TOKEN) import akshare as ak class StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder, self).__init__() self.db =", "def get_stock_code_info(self): stock_info_a_code_name_df = ak.stock_info_a_code_name() for _id in range(stock_info_a_code_name_df.shape[0]): _dict = stock_info_a_code_name_df.iloc[_id].to_dict() self.col.insert_one(_dict)", "import Spyder from Kite import config import tushare as ts ts.set_token(config.TUSHARE_TOKEN) import akshare", "self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): stock_info_a_code_name_df = ak.stock_info_a_code_name() for", "self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): stock_info_a_code_name_df = ak.stock_info_a_code_name()", "from Kite import config import tushare as ts ts.set_token(config.TUSHARE_TOKEN) import akshare as ak", "_id in range(stock_info_a_code_name_df.shape[0]): _dict = stock_info_a_code_name_df.iloc[_id].to_dict() self.col.insert_one(_dict) def get_historical_news(self): pass if __name__ ==", "self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): 
stock_info_a_code_name_df = ak.stock_info_a_code_name() for _id in", "as ak class StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder, self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info =", "= self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): stock_info_a_code_name_df = ak.stock_info_a_code_name() for _id in range(stock_info_a_code_name_df.shape[0]): _dict", "\"\"\" https://waditu.com/document/2 \"\"\" import __init__ from spyder import Spyder from Kite import config", "import __init__ from spyder import Spyder from Kite import config import tushare as", "import config import tushare as ts ts.set_token(config.TUSHARE_TOKEN) import akshare as ak class StockInfoSpyder(Spyder):", "spyder import Spyder from Kite import config import tushare as ts ts.set_token(config.TUSHARE_TOKEN) import", "= ak.stock_info_a_code_name() for _id in range(stock_info_a_code_name_df.shape[0]): _dict = stock_info_a_code_name_df.iloc[_id].to_dict() self.col.insert_one(_dict) def get_historical_news(self): pass", "ak.stock_info_a_code_name() for _id in range(stock_info_a_code_name_df.shape[0]): _dict = stock_info_a_code_name_df.iloc[_id].to_dict() self.col.insert_one(_dict) def get_historical_news(self): pass if", "Kite import config import tushare as ts ts.set_token(config.TUSHARE_TOKEN) import akshare as ak class", "akshare as ak class StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder, self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info", "config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): stock_info_a_code_name_df = ak.stock_info_a_code_name() for _id in range(stock_info_a_code_name_df.shape[0]): _dict = stock_info_a_code_name_df.iloc[_id].to_dict()", "get_stock_code_info(self): stock_info_a_code_name_df = ak.stock_info_a_code_name() for _id in range(stock_info_a_code_name_df.shape[0]): 
_dict = stock_info_a_code_name_df.iloc[_id].to_dict() self.col.insert_one(_dict) def", "https://waditu.com/document/2 \"\"\" import __init__ from spyder import Spyder from Kite import config import", "import akshare as ak class StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder, self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME)", "super(StockInfoSpyder, self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): stock_info_a_code_name_df =", "<filename>src/Gon/stock_info_spyder.py \"\"\" https://waditu.com/document/2 \"\"\" import __init__ from spyder import Spyder from Kite import", "\"\"\" import __init__ from spyder import Spyder from Kite import config import tushare", "ts ts.set_token(config.TUSHARE_TOKEN) import akshare as ak class StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder, self).__init__() self.db", "def __init__(self): super(StockInfoSpyder, self).__init__() self.db = self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self):", "Spyder from Kite import config import tushare as ts ts.set_token(config.TUSHARE_TOKEN) import akshare as", "as ts ts.set_token(config.TUSHARE_TOKEN) import akshare as ak class StockInfoSpyder(Spyder): def __init__(self): super(StockInfoSpyder, self).__init__()", "= self.db_obj.create_db(config.TUSHARE_DATABASE_NAME) self.col_basic_info = self.db_obj.create_col(self.db, config.COLLECTION_NAME_STOCK_BASIC_INFO) def get_stock_code_info(self): stock_info_a_code_name_df = ak.stock_info_a_code_name() for _id", "for _id in range(stock_info_a_code_name_df.shape[0]): _dict = stock_info_a_code_name_df.iloc[_id].to_dict() self.col.insert_one(_dict) def get_historical_news(self): pass if __name__" ]
[ "27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features to", "to 2D with: ogr2ogr -f \"ESRI Shapefile\" -overwrite . Wards_2010.shp -nlt POLYGON', encoding='iso-8859-1',", "datetime import date import boundaries boundaries.register('Ottawa wards', domain='Ottawa, ON', last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'),", "from datetime import date import boundaries boundaries.register('Ottawa wards', domain='Ottawa, ON', last_updated=date(2010, 8, 27),", "source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features to 2D with: ogr2ogr -f \"ESRI Shapefile\"", "name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features to 2D", "Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features to 2D with: ogr2ogr -f \"ESRI", "2D with: ogr2ogr -f \"ESRI Shapefile\" -overwrite . 
Wards_2010.shp -nlt POLYGON', encoding='iso-8859-1', )", "wards', domain='Ottawa, ON', last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html',", "domain='Ottawa, ON', last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip',", "data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features to 2D with: ogr2ogr -f \"ESRI Shapefile\" -overwrite .", "import date import boundaries boundaries.register('Ottawa wards', domain='Ottawa, ON', last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'),", "boundaries boundaries.register('Ottawa wards', domain='Ottawa, ON', last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa',", "8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features", "authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features 
to 2D with: ogr2ogr", "boundaries.register('Ottawa wards', domain='Ottawa, ON', last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html',", "licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features to 2D with: ogr2ogr -f \"ESRI Shapefile\" -overwrite", "last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the", "id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features to 2D with:", "features to 2D with: ogr2ogr -f \"ESRI Shapefile\" -overwrite . Wards_2010.shp -nlt POLYGON',", "ON', last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert", "the features to 2D with: ogr2ogr -f \"ESRI Shapefile\" -overwrite . Wards_2010.shp -nlt", "notes='Convert the features to 2D with: ogr2ogr -f \"ESRI Shapefile\" -overwrite . 
Wards_2010.shp", "date import boundaries boundaries.register('Ottawa wards', domain='Ottawa, ON', last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City", "import boundaries boundaries.register('Ottawa wards', domain='Ottawa, ON', last_updated=date(2010, 8, 27), name_func=boundaries.dashed_attr('WARD_EN'), id_func=boundaries.attr('WARD_NUM'), authority='City of", "of Ottawa', source_url='http://ottawa.ca/online_services/opendata/info/wards2010_en.html', licence_url='http://ottawa.ca/online_services/opendata/terms_en.html', data_url='http://ottawa.ca/online_services/opendata/data/wards2010.zip', notes='Convert the features to 2D with: ogr2ogr -f" ]
[ "- [Industrial exhibits.]\"), (False, 'Fifth Avenue - 90th Street, southeast corner'), (False, 'Recreation", "- Miscellaneous - Children.'), (True, 'Manhattan: 59th Street - 6th Avenue'), (True, 'Queens:", "(West) - 5th Avenue'), (True, 'Manhattan: 5th Avenue - 78th Street'), (True, 'Manhattan:", "Sailboat Pool'), (True, 'Queens: Colonial Avenue - 62nd Drive'), (True, 'Queens: Woodhaven Blvd", "def test_clean_title(): for correct, title in TRUTH: assert correct == title_cleaner.is_pure_location(title), '%s %s'", "78th Street'), (True, 'Manhattan: 5th Avenue - 33rd Street'), (True, 'Queens: Queens Boulevard", "Street - 6th Avenue'), (True, 'Queens: Queens Boulevard - Junction Boulevard'), (True, 'Manhattan:", "24th Street'), (False, \"Queens: Flushing Meadow Park - New York World's Fair of", "Flushing Meadow Park - New York World's Fair of 1939-40 - [Industrial exhibits.]\"),", "33rd Street'), (True, 'Queens: Queens Boulevard - 62nd Avenue'), (False, 'Manhattan: Battery Park.'),", "Pool'), (True, 'Queens: Colonial Avenue - 62nd Drive'), (True, 'Queens: Woodhaven Blvd -", "'Manhattan: 5th Avenue - 33rd Street'), (True, 'Queens: Queens Boulevard - 62nd Avenue'),", "(True, 'Manhattan: 1st Ave. - 34th St. 
E.'), (True, 'Queens: Hoyt Avenue -", "- New York World's Fair of 1939-40 - [Industrial exhibits.]\"), (False, 'Fifth Avenue", "'Manhattan: 50th Street (West) - 5th Avenue'), (True, 'Manhattan: 5th Avenue - 78th", "- Fleet Street'), (True, 'Richmond: New Dorp Lane - Cedar Grove Avenue') ]", "6th Avenue'), (True, 'Queens: Queens Boulevard - Junction Boulevard'), (True, 'Manhattan: 50th Street", "Junction Boulevard'), (True, 'Manhattan: 50th Street (West) - 5th Avenue'), (True, 'Manhattan: 5th", "southeast corner'), (False, 'Recreation and hobbies - Miscellaneous - Children.'), (True, 'Manhattan: 59th", "62nd Avenue'), (False, 'Manhattan: Battery Park.'), (False, 'Manhattan: Central Park - The Sailboat", "(False, 'Manhattan: Central Park - The Sailboat Pool'), (True, 'Queens: Colonial Avenue -", "(True, 'Manhattan: 5th Avenue - 78th Street'), (True, 'Manhattan: 5th Avenue - 33rd", "- 6th Avenue'), (True, 'Queens: Queens Boulevard - Junction Boulevard'), (True, 'Manhattan: 50th", "* import title_cleaner TRUTH = [ (True, 'Manhattan: 1st Ave. - 34th St.", "- 78th Street'), (True, 'Manhattan: 5th Avenue - 33rd Street'), (True, 'Queens: Queens", "Avenue'), (True, 'Manhattan: 5th Avenue - 78th Street'), (True, 'Manhattan: 5th Avenue -", "(False, 'Fifth Avenue - 90th Street, southeast corner'), (False, 'Recreation and hobbies -", "'Manhattan: 1st Ave. - 34th St. E.'), (True, 'Queens: Hoyt Avenue - 24th", "90th Street, southeast corner'), (False, 'Recreation and hobbies - Miscellaneous - Children.'), (True,", "Colonial Avenue - 62nd Drive'), (True, 'Queens: Woodhaven Blvd - Fleet Street'), (True,", "Street, southeast corner'), (False, 'Recreation and hobbies - Miscellaneous - Children.'), (True, 'Manhattan:", "- 62nd Avenue'), (False, 'Manhattan: Battery Park.'), (False, 'Manhattan: Central Park - The", "title_cleaner TRUTH = [ (True, 'Manhattan: 1st Ave. - 34th St. 
E.'), (True,", "'Richmond: New Dorp Lane - Cedar Grove Avenue') ] def test_clean_title(): for correct,", "Park - The Sailboat Pool'), (True, 'Queens: Colonial Avenue - 62nd Drive'), (True,", "The Sailboat Pool'), (True, 'Queens: Colonial Avenue - 62nd Drive'), (True, 'Queens: Woodhaven", "(True, 'Manhattan: 50th Street (West) - 5th Avenue'), (True, 'Manhattan: 5th Avenue -", "'Manhattan: 5th Avenue - 78th Street'), (True, 'Manhattan: 5th Avenue - 33rd Street'),", "(True, 'Richmond: New Dorp Lane - Cedar Grove Avenue') ] def test_clean_title(): for", "59th Street - 6th Avenue'), (True, 'Queens: Queens Boulevard - Junction Boulevard'), (True,", "Central Park - The Sailboat Pool'), (True, 'Queens: Colonial Avenue - 62nd Drive'),", "(False, 'Recreation and hobbies - Miscellaneous - Children.'), (True, 'Manhattan: 59th Street -", "- 34th St. E.'), (True, 'Queens: Hoyt Avenue - 24th Street'), (False, \"Queens:", "Avenue'), (True, 'Queens: Queens Boulevard - Junction Boulevard'), (True, 'Manhattan: 50th Street (West)", "[Industrial exhibits.]\"), (False, 'Fifth Avenue - 90th Street, southeast corner'), (False, 'Recreation and", "Avenue - 90th Street, southeast corner'), (False, 'Recreation and hobbies - Miscellaneous -", "(True, 'Queens: Queens Boulevard - Junction Boulevard'), (True, 'Manhattan: 50th Street (West) -", "import * import title_cleaner TRUTH = [ (True, 'Manhattan: 1st Ave. - 34th", "5th Avenue - 33rd Street'), (True, 'Queens: Queens Boulevard - 62nd Avenue'), (False,", "'Manhattan: Battery Park.'), (False, 'Manhattan: Central Park - The Sailboat Pool'), (True, 'Queens:", "Lane - Cedar Grove Avenue') ] def test_clean_title(): for correct, title in TRUTH:", "Queens Boulevard - 62nd Avenue'), (False, 'Manhattan: Battery Park.'), (False, 'Manhattan: Central Park", "- 33rd Street'), (True, 'Queens: Queens Boulevard - 62nd Avenue'), (False, 'Manhattan: Battery", "nose.tools import * import title_cleaner TRUTH = [ (True, 'Manhattan: 1st Ave. 
-", "'Queens: Hoyt Avenue - 24th Street'), (False, \"Queens: Flushing Meadow Park - New", "'Queens: Colonial Avenue - 62nd Drive'), (True, 'Queens: Woodhaven Blvd - Fleet Street'),", "- 90th Street, southeast corner'), (False, 'Recreation and hobbies - Miscellaneous - Children.'),", "- 24th Street'), (False, \"Queens: Flushing Meadow Park - New York World's Fair", "Meadow Park - New York World's Fair of 1939-40 - [Industrial exhibits.]\"), (False,", "York World's Fair of 1939-40 - [Industrial exhibits.]\"), (False, 'Fifth Avenue - 90th", "- The Sailboat Pool'), (True, 'Queens: Colonial Avenue - 62nd Drive'), (True, 'Queens:", "test_clean_title(): for correct, title in TRUTH: assert correct == title_cleaner.is_pure_location(title), '%s %s' %", "Cedar Grove Avenue') ] def test_clean_title(): for correct, title in TRUTH: assert correct", "(True, 'Manhattan: 59th Street - 6th Avenue'), (True, 'Queens: Queens Boulevard - Junction", "Street'), (True, 'Queens: Queens Boulevard - 62nd Avenue'), (False, 'Manhattan: Battery Park.'), (False,", "hobbies - Miscellaneous - Children.'), (True, 'Manhattan: 59th Street - 6th Avenue'), (True,", "Boulevard - 62nd Avenue'), (False, 'Manhattan: Battery Park.'), (False, 'Manhattan: Central Park -", "Avenue - 62nd Drive'), (True, 'Queens: Woodhaven Blvd - Fleet Street'), (True, 'Richmond:", "Drive'), (True, 'Queens: Woodhaven Blvd - Fleet Street'), (True, 'Richmond: New Dorp Lane", "and hobbies - Miscellaneous - Children.'), (True, 'Manhattan: 59th Street - 6th Avenue'),", "Street'), (False, \"Queens: Flushing Meadow Park - New York World's Fair of 1939-40", "Ave. - 34th St. 
E.'), (True, 'Queens: Hoyt Avenue - 24th Street'), (False,", "(True, 'Queens: Queens Boulevard - 62nd Avenue'), (False, 'Manhattan: Battery Park.'), (False, 'Manhattan:", "'Fifth Avenue - 90th Street, southeast corner'), (False, 'Recreation and hobbies - Miscellaneous", "62nd Drive'), (True, 'Queens: Woodhaven Blvd - Fleet Street'), (True, 'Richmond: New Dorp", "Avenue') ] def test_clean_title(): for correct, title in TRUTH: assert correct == title_cleaner.is_pure_location(title),", "'Queens: Queens Boulevard - Junction Boulevard'), (True, 'Manhattan: 50th Street (West) - 5th", "Dorp Lane - Cedar Grove Avenue') ] def test_clean_title(): for correct, title in", "for correct, title in TRUTH: assert correct == title_cleaner.is_pure_location(title), '%s %s' % (correct,", "correct, title in TRUTH: assert correct == title_cleaner.is_pure_location(title), '%s %s' % (correct, title)", "Boulevard - Junction Boulevard'), (True, 'Manhattan: 50th Street (West) - 5th Avenue'), (True,", "Street'), (True, 'Richmond: New Dorp Lane - Cedar Grove Avenue') ] def test_clean_title():", "1939-40 - [Industrial exhibits.]\"), (False, 'Fifth Avenue - 90th Street, southeast corner'), (False,", "34th St. 
E.'), (True, 'Queens: Hoyt Avenue - 24th Street'), (False, \"Queens: Flushing", "50th Street (West) - 5th Avenue'), (True, 'Manhattan: 5th Avenue - 78th Street'),", "5th Avenue'), (True, 'Manhattan: 5th Avenue - 78th Street'), (True, 'Manhattan: 5th Avenue", "Avenue - 24th Street'), (False, \"Queens: Flushing Meadow Park - New York World's", "Park.'), (False, 'Manhattan: Central Park - The Sailboat Pool'), (True, 'Queens: Colonial Avenue", "(True, 'Manhattan: 5th Avenue - 33rd Street'), (True, 'Queens: Queens Boulevard - 62nd", "World's Fair of 1939-40 - [Industrial exhibits.]\"), (False, 'Fifth Avenue - 90th Street,", "Fleet Street'), (True, 'Richmond: New Dorp Lane - Cedar Grove Avenue') ] def", "Woodhaven Blvd - Fleet Street'), (True, 'Richmond: New Dorp Lane - Cedar Grove", "- Cedar Grove Avenue') ] def test_clean_title(): for correct, title in TRUTH: assert", "Queens Boulevard - Junction Boulevard'), (True, 'Manhattan: 50th Street (West) - 5th Avenue'),", "Blvd - Fleet Street'), (True, 'Richmond: New Dorp Lane - Cedar Grove Avenue')", "Street'), (True, 'Manhattan: 5th Avenue - 33rd Street'), (True, 'Queens: Queens Boulevard -", "'Queens: Woodhaven Blvd - Fleet Street'), (True, 'Richmond: New Dorp Lane - Cedar", "] def test_clean_title(): for correct, title in TRUTH: assert correct == title_cleaner.is_pure_location(title), '%s", "New York World's Fair of 1939-40 - [Industrial exhibits.]\"), (False, 'Fifth Avenue -", "St. E.'), (True, 'Queens: Hoyt Avenue - 24th Street'), (False, \"Queens: Flushing Meadow", "- 5th Avenue'), (True, 'Manhattan: 5th Avenue - 78th Street'), (True, 'Manhattan: 5th", "- Junction Boulevard'), (True, 'Manhattan: 50th Street (West) - 5th Avenue'), (True, 'Manhattan:", "Hoyt Avenue - 24th Street'), (False, \"Queens: Flushing Meadow Park - New York", "Grove Avenue') ] def test_clean_title(): for correct, title in TRUTH: assert correct ==", "[ (True, 'Manhattan: 1st Ave. - 34th St. 
E.'), (True, 'Queens: Hoyt Avenue", "Miscellaneous - Children.'), (True, 'Manhattan: 59th Street - 6th Avenue'), (True, 'Queens: Queens", "(True, 'Queens: Hoyt Avenue - 24th Street'), (False, \"Queens: Flushing Meadow Park -", "import title_cleaner TRUTH = [ (True, 'Manhattan: 1st Ave. - 34th St. E.'),", "\"Queens: Flushing Meadow Park - New York World's Fair of 1939-40 - [Industrial", "- Children.'), (True, 'Manhattan: 59th Street - 6th Avenue'), (True, 'Queens: Queens Boulevard", "'Manhattan: 59th Street - 6th Avenue'), (True, 'Queens: Queens Boulevard - Junction Boulevard'),", "- 62nd Drive'), (True, 'Queens: Woodhaven Blvd - Fleet Street'), (True, 'Richmond: New", "Fair of 1939-40 - [Industrial exhibits.]\"), (False, 'Fifth Avenue - 90th Street, southeast", "exhibits.]\"), (False, 'Fifth Avenue - 90th Street, southeast corner'), (False, 'Recreation and hobbies", "Boulevard'), (True, 'Manhattan: 50th Street (West) - 5th Avenue'), (True, 'Manhattan: 5th Avenue", "E.'), (True, 'Queens: Hoyt Avenue - 24th Street'), (False, \"Queens: Flushing Meadow Park", "from nose.tools import * import title_cleaner TRUTH = [ (True, 'Manhattan: 1st Ave.", "Street (West) - 5th Avenue'), (True, 'Manhattan: 5th Avenue - 78th Street'), (True,", "(False, 'Manhattan: Battery Park.'), (False, 'Manhattan: Central Park - The Sailboat Pool'), (True,", "5th Avenue - 78th Street'), (True, 'Manhattan: 5th Avenue - 33rd Street'), (True,", "Battery Park.'), (False, 'Manhattan: Central Park - The Sailboat Pool'), (True, 'Queens: Colonial", "'Manhattan: Central Park - The Sailboat Pool'), (True, 'Queens: Colonial Avenue - 62nd", "(True, 'Queens: Woodhaven Blvd - Fleet Street'), (True, 'Richmond: New Dorp Lane -", "Children.'), (True, 'Manhattan: 59th Street - 6th Avenue'), (True, 'Queens: Queens Boulevard -", "corner'), (False, 'Recreation and hobbies - Miscellaneous - Children.'), (True, 'Manhattan: 59th Street", "= [ (True, 'Manhattan: 1st Ave. - 34th St. 
E.'), (True, 'Queens: Hoyt", "Avenue - 78th Street'), (True, 'Manhattan: 5th Avenue - 33rd Street'), (True, 'Queens:", "'Queens: Queens Boulevard - 62nd Avenue'), (False, 'Manhattan: Battery Park.'), (False, 'Manhattan: Central", "Park - New York World's Fair of 1939-40 - [Industrial exhibits.]\"), (False, 'Fifth", "TRUTH = [ (True, 'Manhattan: 1st Ave. - 34th St. E.'), (True, 'Queens:", "Avenue - 33rd Street'), (True, 'Queens: Queens Boulevard - 62nd Avenue'), (False, 'Manhattan:", "(True, 'Queens: Colonial Avenue - 62nd Drive'), (True, 'Queens: Woodhaven Blvd - Fleet", "(False, \"Queens: Flushing Meadow Park - New York World's Fair of 1939-40 -", "'Recreation and hobbies - Miscellaneous - Children.'), (True, 'Manhattan: 59th Street - 6th", "of 1939-40 - [Industrial exhibits.]\"), (False, 'Fifth Avenue - 90th Street, southeast corner'),", "New Dorp Lane - Cedar Grove Avenue') ] def test_clean_title(): for correct, title", "Avenue'), (False, 'Manhattan: Battery Park.'), (False, 'Manhattan: Central Park - The Sailboat Pool'),", "1st Ave. - 34th St. E.'), (True, 'Queens: Hoyt Avenue - 24th Street')," ]
[ "Author: Darren Date: 26/02/2021 Solving https://adventofcode.com/2015/day/16 500 Sues. Each with different known attributes,", "as a dict. We also have a list of k:v attributes that we", "1 sue_candidates = sue_list.copy() # we need to find any Sue where k:v", "for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for sue in sue_candidates if", "Each with different known attributes, and potentially other forgetten attributes. Examine list of", "and known_attrib_value < sue[1][known_attrib]] elif known_attrib in [POMS, FISH]: sues_matching_attrib = [sue for", "matching MFCSAM attributes: {result}\") def process_input(data): # Input looks like: # Sue 1:", "else: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in sue[1] and", "in properties} sue_list.append([int(name), props_dict]) return sue_list if __name__ == \"__main__\": t1 = time.perf_counter()", "os import time SCRIPT_DIR = os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS", "we need to find any Sue where k:v is an exact match #", "= [sue for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value <", "line in data: name, attribs = line[4:].split(\":\", 1) properties = [x.strip().split(\":\") for x", "500 Sues. But where we don't know a value, the key is absent.", "sue in sue_candidates] print(f\"Part 1: Aunt Sue candidates matching MFCSAM attributes: {result}\") #", "FISH]: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in sue[1] and", "in [POMS, FISH]: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in", "= [] line: str for line in data: name, attribs = line[4:].split(\":\", 1)", "The MFCSAM produces properties, which we store as a dict. 
We also have", "# we need to find any Sue where k:v is an exact match", "consider any Sue where the k is not present as we don't know", "for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value > sue[1][known_attrib]] else:", "== \"__main__\": t1 = time.perf_counter() main() t2 = time.perf_counter() print(f\"Execution time: {t2 -", "5, TREES: 3, 'cars': 2, 'perfumes': 1 } def main(): # input_file =", "absent. Solution: Part 1: Iterate through our k:V from the MFCSAM. For each", "= [x.strip().split(\":\") for x in attribs.split(\",\")] props_dict = {prop[0]: int(prop[1]) for prop in", "CATS: 7, 'samoyeds': 2, POMS: 3, 'akitas': 0, 'vizslas': 0, FISH: 5, TREES:", "[sue for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value == sue[1][known_attrib]]", "known_attrib_value > sue[1][known_attrib]] else: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib", "attributes. Examine list of k:v pairs determined from item received from Sue, using", "# but also consider any Sue where the k is not present as", "match # but also consider any Sue where the k is not present", "through our k:V from the MFCSAM. For each Sue: If the k is", "Analysis Machine (MFCSAM). The MFCSAM produces properties, which we store as a dict.", "data = f.read().splitlines() sue_list = process_input(data) # Part 1 sue_candidates = sue_list.copy() #", "also consider any Sue where the k is not present as we don't", "9, akitas: 3, goldfish: 0 # Return list. Each item is [i, {k:v,", "If k is present and the value matches, this Sue is a candidate.", "# Return list. Each item is [i, {k:v, k:v...}] sue_list = [] line:", "a candidate. If k is present and the value matches, this Sue is", "name, attribs = line[4:].split(\":\", 1) properties = [x.strip().split(\":\") for x in attribs.split(\",\")] props_dict", "store as a dict. 
We also have a list of k:v attributes that", "properties = [x.strip().split(\":\") for x in attribs.split(\",\")] props_dict = {prop[0]: int(prop[1]) for prop", "many \"\"\" import os import time SCRIPT_DIR = os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE", "sue in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = [] if known_attrib", "if known_attrib not in sue[1]] sues_matching_attrib = [] if known_attrib in [CATS, TREES]:", "First Crime Scene Analysis Machine (MFCSAM). The MFCSAM produces properties, which we store", "value matches, this Sue is a candidate. Part 2: Cats and trees readings", "candidate. Part 2: Cats and trees readings indicates that there are greater than", "potentially other forgetten attributes. Examine list of k:v pairs determined from item received", "# input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file, mode=\"rt\") as", "are fewer than that many \"\"\" import os import time SCRIPT_DIR = os.path.dirname(__file__)", "the k is not present as we don't know the v for known_attrib,", "the k is not present, this Sue is a candidate. If k is", "known_attrib_value < sue[1][known_attrib]] elif known_attrib in [POMS, FISH]: sues_matching_attrib = [sue for sue", "that many \"\"\" import os import time SCRIPT_DIR = os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\"", "But where we don't know a value, the key is absent. Solution: Part", "Sues. But where we don't know a value, the key is absent. Solution:", "if known_attrib in sue[1] and known_attrib_value > sue[1][known_attrib]] else: sues_matching_attrib = [sue for", "SCRIPT_DIR = os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS = 'cats' TREES", "from Sue, using the My First Crime Scene Analysis Machine (MFCSAM). 
The MFCSAM", "candidates matching MFCSAM attributes: {result}\") def process_input(data): # Input looks like: # Sue", "# Part 1 sue_candidates = sue_list.copy() # we need to find any Sue", "know a value, the key is absent. Solution: Part 1: Iterate through our", "known_attrib in sue[1] and known_attrib_value < sue[1][known_attrib]] elif known_attrib in [POMS, FISH]: sues_matching_attrib", "sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = [sue for sue in sue_candidates", "{k:v, k:v...}] sue_list = [] line: str for line in data: name, attribs", "sues_matching_attrib + sues_missing_attrib result = [sue[0] for sue in sue_candidates] print(f\"Part 1: Aunt", "+ sues_missing_attrib result = [sue[0] for sue in sue_candidates] print(f\"Part 2: Aunt Sue", "is not present as we don't know the v for known_attrib, known_attrib_value in", "1 } def main(): # input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR, INPUT_FILE)", "[] if known_attrib in [CATS, TREES]: sues_matching_attrib = [sue for sue in sue_candidates", "input_file = os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file, mode=\"rt\") as f: data = f.read().splitlines() sue_list", "\"\"\" import os import time SCRIPT_DIR = os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE =", "we don't know the v for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue", "attributes that we can remember from 500 Sues. But where we don't know", "= os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file, mode=\"rt\") as f: data", "remember from 500 Sues. 
But where we don't know a value, the key", "readings indicate that there are fewer than that many \"\"\" import os import", "for sue in sue_candidates] print(f\"Part 1: Aunt Sue candidates matching MFCSAM attributes: {result}\")", "in data: name, attribs = line[4:].split(\":\", 1) properties = [x.strip().split(\":\") for x in", "[sue for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value < sue[1][known_attrib]]", "k:v pairs determined from item received from Sue, using the My First Crime", "= sue_list.copy() for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for sue in", "3, 'akitas': 0, 'vizslas': 0, FISH: 5, TREES: 3, 'cars': 2, 'perfumes': 1", "k:v...}] sue_list = [] line: str for line in data: name, attribs =", "f.read().splitlines() sue_list = process_input(data) # Part 1 sue_candidates = sue_list.copy() # we need", "k is not present as we don't know the v for known_attrib, known_attrib_value", "attributes: {result}\") def process_input(data): # Input looks like: # Sue 1: cars: 9,", "'cats' TREES = 'trees' POMS = 'pomeranians' FISH = 'goldfish' known_attribs = {", "this Sue is a candidate. If k is present and the value matches,", "main(): # input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file, mode=\"rt\")", "than that many \"\"\" import os import time SCRIPT_DIR = os.path.dirname(__file__) INPUT_FILE =", "sue_list if __name__ == \"__main__\": t1 = time.perf_counter() main() t2 = time.perf_counter() print(f\"Execution", "is an exact match # but also consider any Sue where the k", "sue_candidates] print(f\"Part 2: Aunt Sue candidates matching MFCSAM attributes: {result}\") def process_input(data): #", "attribs = line[4:].split(\":\", 1) properties = [x.strip().split(\":\") for x in attribs.split(\",\")] props_dict =", "My First Crime Scene Analysis Machine (MFCSAM). 
The MFCSAM produces properties, which we", "and trees readings indicates that there are greater than that many Pomeranians and", "CATS = 'cats' TREES = 'trees' POMS = 'pomeranians' FISH = 'goldfish' known_attribs", "= sues_matching_attrib + sues_missing_attrib result = [sue[0] for sue in sue_candidates] print(f\"Part 1:", "TREES]: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in sue[1] and", "in sue_candidates] print(f\"Part 2: Aunt Sue candidates matching MFCSAM attributes: {result}\") def process_input(data):", "and potentially other forgetten attributes. Examine list of k:v pairs determined from item", "= { 'children': 3, CATS: 7, 'samoyeds': 2, POMS: 3, 'akitas': 0, 'vizslas':", "other forgetten attributes. Examine list of k:v pairs determined from item received from", "the v for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for sue in", "is a candidate. If k is present and the value matches, this Sue", "'perfumes': 1 } def main(): # input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR,", "2 sue_candidates = sue_list.copy() for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for", "we don't know a value, the key is absent. Solution: Part 1: Iterate", "is a candidate. 
Part 2: Cats and trees readings indicates that there are", "\"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS = 'cats' TREES = 'trees' POMS = 'pomeranians'", "present as we don't know the v for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib", "sue_candidates if known_attrib in sue[1] and known_attrib_value < sue[1][known_attrib]] elif known_attrib in [POMS,", "MFCSAM attributes: {result}\") # Part 2 sue_candidates = sue_list.copy() for known_attrib, known_attrib_value in", "properties} sue_list.append([int(name), props_dict]) return sue_list if __name__ == \"__main__\": t1 = time.perf_counter() main()", "using the My First Crime Scene Analysis Machine (MFCSAM). The MFCSAM produces properties,", "'trees' POMS = 'pomeranians' FISH = 'goldfish' known_attribs = { 'children': 3, CATS:", "Each item is [i, {k:v, k:v...}] sue_list = [] line: str for line", "Darren Date: 26/02/2021 Solving https://adventofcode.com/2015/day/16 500 Sues. Each with different known attributes, and", "known_attrib not in sue[1]] sues_matching_attrib = [] if known_attrib in [CATS, TREES]: sues_matching_attrib", "Sue where k:v is an exact match # but also consider any Sue", "We also have a list of k:v attributes that we can remember from", "0, FISH: 5, TREES: 3, 'cars': 2, 'perfumes': 1 } def main(): #", "TREES: 3, 'cars': 2, 'perfumes': 1 } def main(): # input_file = os.path.join(SCRIPT_DIR,", "sue_candidates = sue_list.copy() # we need to find any Sue where k:v is", "this Sue is a candidate. Part 2: Cats and trees readings indicates that", "is absent. Solution: Part 1: Iterate through our k:V from the MFCSAM. For", "candidates matching MFCSAM attributes: {result}\") # Part 2 sue_candidates = sue_list.copy() for known_attrib,", "matches, this Sue is a candidate. 
Part 2: Cats and trees readings indicates", "sues_matching_attrib + sues_missing_attrib result = [sue[0] for sue in sue_candidates] print(f\"Part 2: Aunt", "sue[1] and known_attrib_value > sue[1][known_attrib]] else: sues_matching_attrib = [sue for sue in sue_candidates", "different known attributes, and potentially other forgetten attributes. Examine list of k:v pairs", "prop in properties} sue_list.append([int(name), props_dict]) return sue_list if __name__ == \"__main__\": t1 =", "sue_candidates = sues_matching_attrib + sues_missing_attrib result = [sue[0] for sue in sue_candidates] print(f\"Part", "} def main(): # input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR, INPUT_FILE) with", "and known_attrib_value == sue[1][known_attrib]] sue_candidates = sues_matching_attrib + sues_missing_attrib result = [sue[0] for", "in attribs.split(\",\")] props_dict = {prop[0]: int(prop[1]) for prop in properties} sue_list.append([int(name), props_dict]) return", "props_dict]) return sue_list if __name__ == \"__main__\": t1 = time.perf_counter() main() t2 =", "is [i, {k:v, k:v...}] sue_list = [] line: str for line in data:", "know the v for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for sue", "the value matches, this Sue is a candidate. Part 2: Cats and trees", "pairs determined from item received from Sue, using the My First Crime Scene", "[sue[0] for sue in sue_candidates] print(f\"Part 1: Aunt Sue candidates matching MFCSAM attributes:", "dict. We also have a list of k:v attributes that we can remember", "If the k is not present, this Sue is a candidate. 
If k", "sue[1] and known_attrib_value < sue[1][known_attrib]] elif known_attrib in [POMS, FISH]: sues_matching_attrib = [sue", "= line[4:].split(\":\", 1) properties = [x.strip().split(\":\") for x in attribs.split(\",\")] props_dict = {prop[0]:", "= \"input/sample_input.txt\" CATS = 'cats' TREES = 'trees' POMS = 'pomeranians' FISH =", "trees readings indicates that there are greater than that many Pomeranians and goldfish", "Pomeranians and goldfish readings indicate that there are fewer than that many \"\"\"", "in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = [sue for sue in", "2, 'perfumes': 1 } def main(): # input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file =", "POMS = 'pomeranians' FISH = 'goldfish' known_attribs = { 'children': 3, CATS: 7,", "https://adventofcode.com/2015/day/16 500 Sues. Each with different known attributes, and potentially other forgetten attributes.", "data: name, attribs = line[4:].split(\":\", 1) properties = [x.strip().split(\":\") for x in attribs.split(\",\")]", "indicates that there are greater than that many Pomeranians and goldfish readings indicate", "in sue_candidates if known_attrib in sue[1] and known_attrib_value < sue[1][known_attrib]] elif known_attrib in", "not in sue[1]] sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in", "produces properties, which we store as a dict. We also have a list", "in known_attribs.items(): sues_missing_attrib = [sue for sue in sue_candidates if known_attrib not in", "present and the value matches, this Sue is a candidate. Part 2: Cats", "not present, this Sue is a candidate. 
If k is present and the", "sue in sue_candidates if known_attrib in sue[1] and known_attrib_value > sue[1][known_attrib]] else: sues_matching_attrib", "for line in data: name, attribs = line[4:].split(\":\", 1) properties = [x.strip().split(\":\") for", "Part 2: Cats and trees readings indicates that there are greater than that", "result = [sue[0] for sue in sue_candidates] print(f\"Part 2: Aunt Sue candidates matching", "in sue[1] and known_attrib_value > sue[1][known_attrib]] else: sues_matching_attrib = [sue for sue in", "is present and the value matches, this Sue is a candidate. Part 2:", "str for line in data: name, attribs = line[4:].split(\":\", 1) properties = [x.strip().split(\":\")", "= os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file, mode=\"rt\") as f: data = f.read().splitlines() sue_list =", "= sue_list.copy() # we need to find any Sue where k:v is an", "sue_list.copy() # we need to find any Sue where k:v is an exact", "3, CATS: 7, 'samoyeds': 2, POMS: 3, 'akitas': 0, 'vizslas': 0, FISH: 5,", "0 # Return list. Each item is [i, {k:v, k:v...}] sue_list = []", "\"__main__\": t1 = time.perf_counter() main() t2 = time.perf_counter() print(f\"Execution time: {t2 - t1:0.4f}", "for sue in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = [sue for", "# Input looks like: # Sue 1: cars: 9, akitas: 3, goldfish: 0", "find any Sue where k:v is an exact match # but also consider", "have a list of k:v attributes that we can remember from 500 Sues.", "Part 2 sue_candidates = sue_list.copy() for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue", "500 Sues. Each with different known attributes, and potentially other forgetten attributes. Examine", "k:v attributes that we can remember from 500 Sues. 
But where we don't", "= process_input(data) # Part 1 sue_candidates = sue_list.copy() # we need to find", "sue[1][known_attrib]] sue_candidates = sues_matching_attrib + sues_missing_attrib result = [sue[0] for sue in sue_candidates]", "= 'cats' TREES = 'trees' POMS = 'pomeranians' FISH = 'goldfish' known_attribs =", "value, the key is absent. Solution: Part 1: Iterate through our k:V from", "attributes, and potentially other forgetten attributes. Examine list of k:v pairs determined from", "[CATS, TREES]: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in sue[1]", "{result}\") def process_input(data): # Input looks like: # Sue 1: cars: 9, akitas:", "Part 1: Iterate through our k:V from the MFCSAM. For each Sue: If", "in sue[1] and known_attrib_value == sue[1][known_attrib]] sue_candidates = sues_matching_attrib + sues_missing_attrib result =", "need to find any Sue where k:v is an exact match # but", "known_attrib_value == sue[1][known_attrib]] sue_candidates = sues_matching_attrib + sues_missing_attrib result = [sue[0] for sue", "sue in sue_candidates] print(f\"Part 2: Aunt Sue candidates matching MFCSAM attributes: {result}\") def", "known_attrib in sue[1] and known_attrib_value > sue[1][known_attrib]] else: sues_matching_attrib = [sue for sue", "elif known_attrib in [POMS, FISH]: sues_matching_attrib = [sue for sue in sue_candidates if", "26/02/2021 Solving https://adventofcode.com/2015/day/16 500 Sues. Each with different known attributes, and potentially other", "< sue[1][known_attrib]] elif known_attrib in [POMS, FISH]: sues_matching_attrib = [sue for sue in", "t1 = time.perf_counter() main() t2 = time.perf_counter() print(f\"Execution time: {t2 - t1:0.4f} seconds\")", "k is not present, this Sue is a candidate. If k is present", "which we store as a dict. We also have a list of k:v", "the key is absent. Solution: Part 1: Iterate through our k:V from the", "k:V from the MFCSAM. 
For each Sue: If the k is not present,", "1) properties = [x.strip().split(\":\") for x in attribs.split(\",\")] props_dict = {prop[0]: int(prop[1]) for", "is not present, this Sue is a candidate. If k is present and", "key is absent. Solution: Part 1: Iterate through our k:V from the MFCSAM.", "sue_candidates if known_attrib in sue[1] and known_attrib_value > sue[1][known_attrib]] else: sues_matching_attrib = [sue", "(MFCSAM). The MFCSAM produces properties, which we store as a dict. We also", "goldfish readings indicate that there are fewer than that many \"\"\" import os", "known_attrib in [CATS, TREES]: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib", "like: # Sue 1: cars: 9, akitas: 3, goldfish: 0 # Return list.", "of k:v pairs determined from item received from Sue, using the My First", "readings indicates that there are greater than that many Pomeranians and goldfish readings", "known attributes, and potentially other forgetten attributes. Examine list of k:v pairs determined", "2: Cats and trees readings indicates that there are greater than that many", "sue_list = process_input(data) # Part 1 sue_candidates = sue_list.copy() # we need to", "in sue_candidates] print(f\"Part 1: Aunt Sue candidates matching MFCSAM attributes: {result}\") # Part", "if known_attrib in [CATS, TREES]: sues_matching_attrib = [sue for sue in sue_candidates if", "def main(): # input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file,", "attributes: {result}\") # Part 2 sue_candidates = sue_list.copy() for known_attrib, known_attrib_value in known_attribs.items():", "for sue in sue_candidates] print(f\"Part 2: Aunt Sue candidates matching MFCSAM attributes: {result}\")", "known_attrib in [POMS, FISH]: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib", "in [CATS, TREES]: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in", "indicate that there are 
fewer than that many \"\"\" import os import time", "sue_list = [] line: str for line in data: name, attribs = line[4:].split(\":\",", "== sue[1][known_attrib]] sue_candidates = sues_matching_attrib + sues_missing_attrib result = [sue[0] for sue in", "= 'trees' POMS = 'pomeranians' FISH = 'goldfish' known_attribs = { 'children': 3,", "fewer than that many \"\"\" import os import time SCRIPT_DIR = os.path.dirname(__file__) INPUT_FILE", "we store as a dict. We also have a list of k:v attributes", "Sue where the k is not present as we don't know the v", "any Sue where the k is not present as we don't know the", "known_attribs.items(): sues_missing_attrib = [sue for sue in sue_candidates if known_attrib not in sue[1]]", "cars: 9, akitas: 3, goldfish: 0 # Return list. Each item is [i,", "Date: 26/02/2021 Solving https://adventofcode.com/2015/day/16 500 Sues. Each with different known attributes, and potentially", "don't know a value, the key is absent. Solution: Part 1: Iterate through", "open(input_file, mode=\"rt\") as f: data = f.read().splitlines() sue_list = process_input(data) # Part 1", "= [sue[0] for sue in sue_candidates] print(f\"Part 2: Aunt Sue candidates matching MFCSAM", "= [sue[0] for sue in sue_candidates] print(f\"Part 1: Aunt Sue candidates matching MFCSAM", "FISH = 'goldfish' known_attribs = { 'children': 3, CATS: 7, 'samoyeds': 2, POMS:", "sue_candidates = sue_list.copy() for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for sue", "known_attrib in sue[1] and known_attrib_value == sue[1][known_attrib]] sue_candidates = sues_matching_attrib + sues_missing_attrib result", "= [sue for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value ==", "sue in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = [sue for sue", "# Sue 1: cars: 9, akitas: 3, goldfish: 0 # Return list. 
Each", "props_dict = {prop[0]: int(prop[1]) for prop in properties} sue_list.append([int(name), props_dict]) return sue_list if", "known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for sue in sue_candidates if known_attrib", "to find any Sue where k:v is an exact match # but also", "for x in attribs.split(\",\")] props_dict = {prop[0]: int(prop[1]) for prop in properties} sue_list.append([int(name),", "0, 'vizslas': 0, FISH: 5, TREES: 3, 'cars': 2, 'perfumes': 1 } def", "[sue for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value > sue[1][known_attrib]]", "not present as we don't know the v for known_attrib, known_attrib_value in known_attribs.items():", "sue[1] and known_attrib_value == sue[1][known_attrib]] sue_candidates = sues_matching_attrib + sues_missing_attrib result = [sue[0]", "= 'pomeranians' FISH = 'goldfish' known_attribs = { 'children': 3, CATS: 7, 'samoyeds':", "<reponame>derailed-dash/Advent-of-Code<filename>src/AoC_2015/d16_finding_Sue_with_list_comprehension/finding_sue.py \"\"\" Author: Darren Date: 26/02/2021 Solving https://adventofcode.com/2015/day/16 500 Sues. Each with different", "sues_missing_attrib result = [sue[0] for sue in sue_candidates] print(f\"Part 1: Aunt Sue candidates", "and known_attrib_value > sue[1][known_attrib]] else: sues_matching_attrib = [sue for sue in sue_candidates if", "we can remember from 500 Sues. But where we don't know a value,", "[x.strip().split(\":\") for x in attribs.split(\",\")] props_dict = {prop[0]: int(prop[1]) for prop in properties}", "= [sue for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value >", "in sue[1]] sues_matching_attrib = [] if known_attrib in [CATS, TREES]: sues_matching_attrib = [sue", "each Sue: If the k is not present, this Sue is a candidate.", "than that many Pomeranians and goldfish readings indicate that there are fewer than", "1: Iterate through our k:V from the MFCSAM. 
For each Sue: If the", "Sue is a candidate. Part 2: Cats and trees readings indicates that there", "import time SCRIPT_DIR = os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS =", "sue[1][known_attrib]] else: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in sue[1]", "os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file, mode=\"rt\") as f: data =", "+ sues_missing_attrib result = [sue[0] for sue in sue_candidates] print(f\"Part 1: Aunt Sue", "sue[1]] sues_matching_attrib = [] if known_attrib in [CATS, TREES]: sues_matching_attrib = [sue for", "return sue_list if __name__ == \"__main__\": t1 = time.perf_counter() main() t2 = time.perf_counter()", "result = [sue[0] for sue in sue_candidates] print(f\"Part 1: Aunt Sue candidates matching", "mode=\"rt\") as f: data = f.read().splitlines() sue_list = process_input(data) # Part 1 sue_candidates", "a candidate. 
Part 2: Cats and trees readings indicates that there are greater", "Cats and trees readings indicates that there are greater than that many Pomeranians", "time SCRIPT_DIR = os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS = 'cats'", "item is [i, {k:v, k:v...}] sue_list = [] line: str for line in", "{ 'children': 3, CATS: 7, 'samoyeds': 2, POMS: 3, 'akitas': 0, 'vizslas': 0,", "sues_missing_attrib = [sue for sue in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib", "sues_missing_attrib result = [sue[0] for sue in sue_candidates] print(f\"Part 2: Aunt Sue candidates", "1: Aunt Sue candidates matching MFCSAM attributes: {result}\") # Part 2 sue_candidates =", "= [sue for sue in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib =", "2: Aunt Sue candidates matching MFCSAM attributes: {result}\") def process_input(data): # Input looks", "# Part 2 sue_candidates = sue_list.copy() for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib =", "list of k:v attributes that we can remember from 500 Sues. But where", "__name__ == \"__main__\": t1 = time.perf_counter() main() t2 = time.perf_counter() print(f\"Execution time: {t2", "there are fewer than that many \"\"\" import os import time SCRIPT_DIR =", "Solution: Part 1: Iterate through our k:V from the MFCSAM. For each Sue:", "'goldfish' known_attribs = { 'children': 3, CATS: 7, 'samoyeds': 2, POMS: 3, 'akitas':", "where we don't know a value, the key is absent. Solution: Part 1:", "For each Sue: If the k is not present, this Sue is a", "that we can remember from 500 Sues. But where we don't know a", "Solving https://adventofcode.com/2015/day/16 500 Sues. Each with different known attributes, and potentially other forgetten", "process_input(data): # Input looks like: # Sue 1: cars: 9, akitas: 3, goldfish:", "def process_input(data): # Input looks like: # Sue 1: cars: 9, akitas: 3,", "from the MFCSAM. 
For each Sue: If the k is not present, this", "= os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS = 'cats' TREES =", "and the value matches, this Sue is a candidate. Part 2: Cats and", "line[4:].split(\":\", 1) properties = [x.strip().split(\":\") for x in attribs.split(\",\")] props_dict = {prop[0]: int(prop[1])", "INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS = 'cats' TREES = 'trees' POMS", "looks like: # Sue 1: cars: 9, akitas: 3, goldfish: 0 # Return", "in sue[1] and known_attrib_value < sue[1][known_attrib]] elif known_attrib in [POMS, FISH]: sues_matching_attrib =", "Sue, using the My First Crime Scene Analysis Machine (MFCSAM). The MFCSAM produces", "'samoyeds': 2, POMS: 3, 'akitas': 0, 'vizslas': 0, FISH: 5, TREES: 3, 'cars':", "process_input(data) # Part 1 sue_candidates = sue_list.copy() # we need to find any", "[i, {k:v, k:v...}] sue_list = [] line: str for line in data: name,", "from item received from Sue, using the My First Crime Scene Analysis Machine", "= \"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS = 'cats' TREES = 'trees' POMS =", "Return list. Each item is [i, {k:v, k:v...}] sue_list = [] line: str", "[] line: str for line in data: name, attribs = line[4:].split(\":\", 1) properties", "v for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for sue in sue_candidates", "Sue candidates matching MFCSAM attributes: {result}\") def process_input(data): # Input looks like: #", "Scene Analysis Machine (MFCSAM). 
The MFCSAM produces properties, which we store as a", "determined from item received from Sue, using the My First Crime Scene Analysis", "and goldfish readings indicate that there are fewer than that many \"\"\" import", "sues_matching_attrib = [] if known_attrib in [CATS, TREES]: sues_matching_attrib = [sue for sue", "there are greater than that many Pomeranians and goldfish readings indicate that there", "are greater than that many Pomeranians and goldfish readings indicate that there are", "known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for sue in sue_candidates if known_attrib not", "sue_candidates if known_attrib in sue[1] and known_attrib_value == sue[1][known_attrib]] sue_candidates = sues_matching_attrib +", "'vizslas': 0, FISH: 5, TREES: 3, 'cars': 2, 'perfumes': 1 } def main():", "{result}\") # Part 2 sue_candidates = sue_list.copy() for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib", "sue in sue_candidates if known_attrib in sue[1] and known_attrib_value == sue[1][known_attrib]] sue_candidates =", "in sue[1]] sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in sue[1]", "Examine list of k:v pairs determined from item received from Sue, using the", "exact match # but also consider any Sue where the k is not", "os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS = 'cats' TREES = 'trees'", "list of k:v pairs determined from item received from Sue, using the My", "known_attrib not in sue[1]] sues_matching_attrib = [sue for sue in sue_candidates if known_attrib", "= sues_matching_attrib + sues_missing_attrib result = [sue[0] for sue in sue_candidates] print(f\"Part 2:", "'cars': 2, 'perfumes': 1 } def main(): # input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file", "[sue[0] for sue in sue_candidates] print(f\"Part 2: Aunt Sue candidates matching MFCSAM attributes:", "import os import time SCRIPT_DIR = 
os.path.dirname(__file__) INPUT_FILE = \"input/input.txt\" SAMPLE_INPUT_FILE = \"input/sample_input.txt\"", "any Sue where k:v is an exact match # but also consider any", "POMS: 3, 'akitas': 0, 'vizslas': 0, FISH: 5, TREES: 3, 'cars': 2, 'perfumes':", "\"input/sample_input.txt\" CATS = 'cats' TREES = 'trees' POMS = 'pomeranians' FISH = 'goldfish'", "from 500 Sues. But where we don't know a value, the key is", "Sue is a candidate. If k is present and the value matches, this", "sue[1]] sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in sue[1] and", "k:v is an exact match # but also consider any Sue where the", "if known_attrib in sue[1] and known_attrib_value < sue[1][known_attrib]] elif known_attrib in [POMS, FISH]:", "that there are fewer than that many \"\"\" import os import time SCRIPT_DIR", "sue in sue_candidates if known_attrib in sue[1] and known_attrib_value < sue[1][known_attrib]] elif known_attrib", "Sue candidates matching MFCSAM attributes: {result}\") # Part 2 sue_candidates = sue_list.copy() for", "for sue in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = [] if", "TREES = 'trees' POMS = 'pomeranians' FISH = 'goldfish' known_attribs = { 'children':", "3, 'cars': 2, 'perfumes': 1 } def main(): # input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE)", "input_file = os.path.join(SCRIPT_DIR, SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file, mode=\"rt\") as f:", "for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value < sue[1][known_attrib]] elif", "as we don't know the v for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib =", "int(prop[1]) for prop in properties} sue_list.append([int(name), props_dict]) return sue_list if __name__ == \"__main__\":", "if __name__ == \"__main__\": t1 = time.perf_counter() main() t2 = time.perf_counter() print(f\"Execution time:", "received from Sue, using the My First Crime Scene Analysis 
Machine (MFCSAM). The", "'akitas': 0, 'vizslas': 0, FISH: 5, TREES: 3, 'cars': 2, 'perfumes': 1 }", "os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file, mode=\"rt\") as f: data = f.read().splitlines() sue_list = process_input(data)", "Sue: If the k is not present, this Sue is a candidate. If", "for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value == sue[1][known_attrib]] sue_candidates", "Aunt Sue candidates matching MFCSAM attributes: {result}\") # Part 2 sue_candidates = sue_list.copy()", "also have a list of k:v attributes that we can remember from 500", "'children': 3, CATS: 7, 'samoyeds': 2, POMS: 3, 'akitas': 0, 'vizslas': 0, FISH:", "2, POMS: 3, 'akitas': 0, 'vizslas': 0, FISH: 5, TREES: 3, 'cars': 2,", "1: cars: 9, akitas: 3, goldfish: 0 # Return list. Each item is", "in sue_candidates if known_attrib in sue[1] and known_attrib_value > sue[1][known_attrib]] else: sues_matching_attrib =", "goldfish: 0 # Return list. Each item is [i, {k:v, k:v...}] sue_list =", "a list of k:v attributes that we can remember from 500 Sues. But", "INPUT_FILE) with open(input_file, mode=\"rt\") as f: data = f.read().splitlines() sue_list = process_input(data) #", "\"\"\" Author: Darren Date: 26/02/2021 Solving https://adventofcode.com/2015/day/16 500 Sues. Each with different known", "if known_attrib not in sue[1]] sues_matching_attrib = [sue for sue in sue_candidates if", "print(f\"Part 2: Aunt Sue candidates matching MFCSAM attributes: {result}\") def process_input(data): # Input", "forgetten attributes. Examine list of k:v pairs determined from item received from Sue,", "x in attribs.split(\",\")] props_dict = {prop[0]: int(prop[1]) for prop in properties} sue_list.append([int(name), props_dict])", "Input looks like: # Sue 1: cars: 9, akitas: 3, goldfish: 0 #", "k is present and the value matches, this Sue is a candidate. Part", "Sue 1: cars: 9, akitas: 3, goldfish: 0 # Return list. 
Each item", "matching MFCSAM attributes: {result}\") # Part 2 sue_candidates = sue_list.copy() for known_attrib, known_attrib_value", "our k:V from the MFCSAM. For each Sue: If the k is not", "= 'goldfish' known_attribs = { 'children': 3, CATS: 7, 'samoyeds': 2, POMS: 3,", "but also consider any Sue where the k is not present as we", "= [] if known_attrib in [CATS, TREES]: sues_matching_attrib = [sue for sue in", "akitas: 3, goldfish: 0 # Return list. Each item is [i, {k:v, k:v...}]", "[sue for sue in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = []", "MFCSAM produces properties, which we store as a dict. We also have a", "greater than that many Pomeranians and goldfish readings indicate that there are fewer", "> sue[1][known_attrib]] else: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in", "sue_candidates] print(f\"Part 1: Aunt Sue candidates matching MFCSAM attributes: {result}\") # Part 2", "line: str for line in data: name, attribs = line[4:].split(\":\", 1) properties =", "sue[1][known_attrib]] elif known_attrib in [POMS, FISH]: sues_matching_attrib = [sue for sue in sue_candidates", "candidate. If k is present and the value matches, this Sue is a", "of k:v attributes that we can remember from 500 Sues. But where we", "f: data = f.read().splitlines() sue_list = process_input(data) # Part 1 sue_candidates = sue_list.copy()", "Crime Scene Analysis Machine (MFCSAM). The MFCSAM produces properties, which we store as", "many Pomeranians and goldfish readings indicate that there are fewer than that many", "sue_list.copy() for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for sue in sue_candidates", "3, goldfish: 0 # Return list. 
Each item is [i, {k:v, k:v...}] sue_list", "item received from Sue, using the My First Crime Scene Analysis Machine (MFCSAM).", "in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = [] if known_attrib in", "for prop in properties} sue_list.append([int(name), props_dict]) return sue_list if __name__ == \"__main__\": t1", "an exact match # but also consider any Sue where the k is", "that many Pomeranians and goldfish readings indicate that there are fewer than that", "a dict. We also have a list of k:v attributes that we can", "that there are greater than that many Pomeranians and goldfish readings indicate that", "sue_list.append([int(name), props_dict]) return sue_list if __name__ == \"__main__\": t1 = time.perf_counter() main() t2", "the My First Crime Scene Analysis Machine (MFCSAM). The MFCSAM produces properties, which", "= {prop[0]: int(prop[1]) for prop in properties} sue_list.append([int(name), props_dict]) return sue_list if __name__", "if known_attrib in sue[1] and known_attrib_value == sue[1][known_attrib]] sue_candidates = sues_matching_attrib + sues_missing_attrib", "[POMS, FISH]: sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in sue[1]", "with open(input_file, mode=\"rt\") as f: data = f.read().splitlines() sue_list = process_input(data) # Part", "a value, the key is absent. Solution: Part 1: Iterate through our k:V", "present, this Sue is a candidate. If k is present and the value", "sues_matching_attrib = [sue for sue in sue_candidates if known_attrib in sue[1] and known_attrib_value", "known_attribs = { 'children': 3, CATS: 7, 'samoyeds': 2, POMS: 3, 'akitas': 0,", "sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = [] if known_attrib in [CATS,", "Iterate through our k:V from the MFCSAM. For each Sue: If the k", "SAMPLE_INPUT_FILE) input_file = os.path.join(SCRIPT_DIR, INPUT_FILE) with open(input_file, mode=\"rt\") as f: data = f.read().splitlines()", "MFCSAM. 
For each Sue: If the k is not present, this Sue is", "Sues. Each with different known attributes, and potentially other forgetten attributes. Examine list", "with different known attributes, and potentially other forgetten attributes. Examine list of k:v", "as f: data = f.read().splitlines() sue_list = process_input(data) # Part 1 sue_candidates =", "7, 'samoyeds': 2, POMS: 3, 'akitas': 0, 'vizslas': 0, FISH: 5, TREES: 3,", "FISH: 5, TREES: 3, 'cars': 2, 'perfumes': 1 } def main(): # input_file", "properties, which we store as a dict. We also have a list of", "Machine (MFCSAM). The MFCSAM produces properties, which we store as a dict. We", "where the k is not present as we don't know the v for", "= f.read().splitlines() sue_list = process_input(data) # Part 1 sue_candidates = sue_list.copy() # we", "MFCSAM attributes: {result}\") def process_input(data): # Input looks like: # Sue 1: cars:", "print(f\"Part 1: Aunt Sue candidates matching MFCSAM attributes: {result}\") # Part 2 sue_candidates", "where k:v is an exact match # but also consider any Sue where", "in sue_candidates if known_attrib in sue[1] and known_attrib_value == sue[1][known_attrib]] sue_candidates = sues_matching_attrib", "attribs.split(\",\")] props_dict = {prop[0]: int(prop[1]) for prop in properties} sue_list.append([int(name), props_dict]) return sue_list", "don't know the v for known_attrib, known_attrib_value in known_attribs.items(): sues_missing_attrib = [sue for", "{prop[0]: int(prop[1]) for prop in properties} sue_list.append([int(name), props_dict]) return sue_list if __name__ ==", "SAMPLE_INPUT_FILE = \"input/sample_input.txt\" CATS = 'cats' TREES = 'trees' POMS = 'pomeranians' FISH", "[sue for sue in sue_candidates if known_attrib not in sue[1]] sues_matching_attrib = [sue", "Aunt Sue candidates matching MFCSAM attributes: {result}\") def process_input(data): # Input looks like:", "list. 
Each item is [i, {k:v, k:v...}] sue_list = [] line: str for", "not in sue[1]] sues_matching_attrib = [] if known_attrib in [CATS, TREES]: sues_matching_attrib =", "'pomeranians' FISH = 'goldfish' known_attribs = { 'children': 3, CATS: 7, 'samoyeds': 2,", "can remember from 500 Sues. But where we don't know a value, the", "Part 1 sue_candidates = sue_list.copy() # we need to find any Sue where", "the MFCSAM. For each Sue: If the k is not present, this Sue" ]
[ "your items that you would like to purchase here, hit 0 when you're", "#This while loop represents the scanning input, the cashier will continue to scan", "the total bill, the receipt of the total bill is displayed and the", "that has been scanned based on the UPC of the item that has", "elif item_scanned == '666666': return 'SMALL' elif item_scanned == '242424': return 'LARGE' elif", "'666666' UPC_LARGE = '242424' if product_scanned == UPC_SINGLE: subtotal_before_tax +=PRICE_SINGLE elif product_scanned ==", "\"product_scanned\")which is determined by using the UPC_SINGLE, UPC_SMALL and UPC_LARGE variables >>> calculate_subtotal('111111')", "= 0 amount_tendered = 0 item = True #This displays the welcome sign", "that you would like to purchase here, hit 0 when you're ready to", "standard rules in Canada: 0.01 to 0.02 will round down to 0.00. 0.", "= ret_val + (diff / 2.0) return ret_val # In[3]: square_root(144) # In[4]:", ">>> calculate_subtotal('111111') 1 >>> calculate_subtotal('666666') 5 >>> calculate_subtotal('242424') 19 \"\"\" subtotal_before_tax = 0", "like to try again, please repeat the process and scan your items again.", "here if __name__ == \"__main__\": #Sets the values of subtotal_before_tax and item to", "after HST is shown to the customer #'total_bill' is rounded to the nearest", "Function returns the resulting variable \"total\", rounded and formatted to 2 decimal points.", "full amount if amount_of_change < 0: print(\"Sorry about that! You are short by:", "program ends. elif amount_of_change > 0: print (\"\\nHere is your change!: $\",display_change(amount_tendered,total_bill)) display_totalbill()", "on the UPC of the item that has been scanned i.e. the \"item_scanned\"", "to scan items until he is done (i.e. 
hits 0) while get_barcode(item)!= 'done':", "time an item is scanned def calculate_subtotal(product_scanned): \"\"\" (str) -> int Returns the", "of print functions for the total bill which includes: subtotal before tax, HST", "order is cancelled. The customer can repeat the process again by re-running the", "#This function calculates the subtotal as a running total, which updates each time", "displays the final total bill def display_totalbill(): # Returns a series of print", "Please enter an appropriate UPC for your item\") #This function calculates the subtotal", "display_change(amount_tendered, total_bill)) print(\"\\nThank you for shopping with MinMax!\") #The main function starts here", "shopping with MinMax!\") #The main function starts here if __name__ == \"__main__\": #Sets", "until the full amount of the bill is paid then thanks the customer", "amount owed!\") display_totalbill() #If the customer pays more than the full amount owed", "this purchase, just hit 0 again.\") if amount_tendered == 0: return \"end\" else:", "0.06 to 0.07 will round down to 0.05. 0.08 to 0.09 will round", "is done (i.e. hits 0) while get_barcode(item)!= 'done': item = input(\"Scan your items", "/ 0.05), 2), '.2f')) #Sets the value for the amount of change either", "rounding: $\", format(total_bill, '.2f')) print(\"Payment: $\", format(amount_tendered, '.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered, total_bill)) print(\"\\nThank", "subtotal_before_tax #This function gets how much the customer gives i.e. input(\"enter your stuff\")", "upcoming loops subtotal_before_tax = 0 amount_tendered = 0 item = True #This displays", "points. 
Variable \"total\" is then rounded to the nearest 5 cents using the", "def get_amount_tendered(): \"\"\" Returns either the end of this program or the value", "farewell greeting #All values returned are displayed with two decimal points in the", "pressed (because 0 means done), the total price ('total_bill') after HST is shown", "the payment for the customer, it repeats until the full amount of the", "the welcome sign to the MinMax Store display_welcome() #This while loop represents the", "+= PRICE_LARGE return subtotal_before_tax #This function gets how much the customer gives i.e.", "an input prompt. If the cashier hits '0', the program is ended due", "= 0 item = True #This displays the welcome sign to the MinMax", "done), the total price ('total_bill') after HST is shown to the customer #'total_bill'", "shown to the customer #'total_bill' is rounded to the nearest 5 cents using", "30 >>> get_amount_tendered(40) 40 >>>get_amount_tendered(50) 50 >>>get_amount_tendered(0) Thanks for shopping at MinMax! You", "if amount_tendered == 0: sys.exit(\"Thanks for shopping at MinMax! You have cancelled your", "the customer gives i.e. input(\"enter your stuff\") def get_amount_tendered(): \"\"\" Returns either the", "return 'LARGE' elif item_scanned == '0': return 'done' else: print (\"Oops! You entered", "of the customer's purchase before tax by using the price of the item", "return ret_val # In[3]: square_root(144) # In[4]: def display_welcome(): \"\"\" Returns string 'Welcome", "to finish up! 
\") subtotal_before_tax += calculate_subtotal(item) # As the loop continues, the", "price after rounding, payment from the customer , any change owed to the", "the receipt of the total bil is displayed, change is given to the", "-> int Returns the subtotal of the customer's purchase before tax by using", ">>> display_change(10.7,1.4) 9.30 \"\"\" difference = abs(total_bill-amount_tendered) return (format(difference, '.2f')) #This function calculates", "the format of $0.00 print(\"\\nHere is your bill! \\nSubtotal: $\", format(subtotal_before_tax, '.2f')) print(\"HST:", "to accumulate and show on the screen for them to view print(\"Your subtotal", "the subtotal of the customer's purchase before tax by using the price of", "the full amount of the bill is paid then thanks the customer and", "full amount of the bill is paid then thanks the customer and provides", "to 0.10 >>> display_change(10.0,7.97) 2.05 >>> display_change(10.5,2.0) 8.50 >>> display_change(10.7,1.4) 9.30 \"\"\" difference", "provides a final receipt #All values returned are displayed with two decimal points", ">>> calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE = 1.13 total_bill = subtotal *HST_RATE return format(round(0.05", "the customer is less than the cost of the total bill, the customer", "still owed to the MinMax store. The variable \"difference\" is formatted to return", "#If the customer pays the full amount owed on the total bill, the", "string 'Welcome to MinMax' >>> display_welcome() 'Welcome to MinMax!' \"\"\" print(\"Welcome to MinMax!\")", "'0', the program is ended due to cancellation. 
If the customer provides any", "'SMALL' elif item_scanned == '242424': return 'LARGE' elif item_scanned == '0': return 'done'", "the end of this program or the value of the amount tendered by", "True #This displays the welcome sign to the MinMax Store display_welcome() #This while", "('total_bill') after HST is shown to the customer #'total_bill' is rounded to the", "main function starts here if __name__ == \"__main__\": #Sets the values of subtotal_before_tax", "2), '.2f') #This function displays the final total bill def display_totalbill(): # Returns", "function starts here if __name__ == \"__main__\": #Sets the values of subtotal_before_tax and", "== 0: print (\"You've entered the full amount owed!\") display_totalbill() #If the customer", "for your item\") #This function calculates the subtotal as a running total, which", "#This while loop represents the payment for the customer, it repeats until the", "Store display_welcome() #This while loop represents the scanning input, the cashier will continue", "paid then thanks the customer and provides a final receipt #All values returned", "for the customer, it repeats until the full amount of the bill is", "customer enters 0, the the order is cancelled. The customer can repeat the", "to 0.00. 0. 03 to 0.04 will round up to 0.05. 0.06 to", "how much change is owed to the customer, or still owed to the", "#This displays the welcome sign to the MinMax Store display_welcome() #This while loop", "get_amount_tendered(40) 40 >>>get_amount_tendered(50) 50 >>>get_amount_tendered(0) Thanks for shopping at MinMax! You have cancelled", "(i.e. 10.50 instead of 10.5). 
\"difference\" is then rounded to the nearest 5", "the upcoming loops subtotal_before_tax = 0 amount_tendered = 0 item = True #This", "format of $0.00 while amount_of_change < 0: amount_tendered = float(get_amount_tendered()) #If customer enters", "subtotal_before_tax = 0 PRICE_SINGLE = 1 PRICE_SMALL = 5 PRICE_LARGE = 19 UPC_SINGLE", "nearest nickel, total price after rounding, payment from the customer , any change", "down to 0.05. 0.08 to 0.09 will round up to 0.10 >>> display_change(10.0,7.97)", "points in the format of $0.00 while amount_of_change < 0: amount_tendered = float(get_amount_tendered())", "the customer or given to the customer amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #This", "customer is prompted to try again to pay full amount if amount_of_change <", "Universal Price Code (UPC). Please enter an appropriate UPC for your item\") #This", "cancellation. If the customer provides any other value, this is captured as the", "based on the UPC of the item that has been scanned i.e. the", "updates each time an item is scanned def calculate_subtotal(product_scanned): \"\"\" (str) -> int", "Returns either the end of this program or the value of the amount", "from the customer , any change owed to the customer and a farewell", "so far is: $\", format(subtotal_before_tax,'.2f')) #Once the loop is over and 0 has", "def calculate_subtotal(product_scanned): \"\"\" (str) -> int Returns the subtotal of the customer's purchase", "return (format(difference, '.2f')) #This function calculates the total cost as a running total", "$\", format(subtotal_before_tax, '.2f')) print(\"HST: $\", format(0.13 * subtotal_before_tax, '.2f')) print(\"Total price before rounding:", "the customer and a farewell greeting #All values returned are displayed with two", "provides any other value, this is captured as the amount tendered by the", "2 decimal points. 
Variable \"total\" is then rounded to the nearest 5 cents", "= True #This displays the welcome sign to the MinMax Store display_welcome() #This", "float Returns the difference as the variable \"difference\" in value between total_bill and", "it repeats until the full amount of the bill is paid then thanks", "= sq_rand diff = sq_rand - ret_val*ret_val while abs(diff) > 0.000001: diff =", "determined by using the UPC_SINGLE, UPC_SMALL and UPC_LARGE variables >>> calculate_subtotal('111111') 1 >>>", "'0': return 'done' else: print (\"Oops! You entered an unrecognized Universal Price Code", "if amount_tendered == 0: return \"end\" else: return amount_tendered #This function displays the", "0.02 will round down to 0.00. 0. 03 to 0.04 will round up", "loops subtotal_before_tax = 0 amount_tendered = 0 item = True #This displays the", "been scanned based on the UPC of the item that has been scanned", "\"\"\" if item_scanned == '111111': return 'SINGLES' elif item_scanned == '666666': return 'SMALL'", "again, please repeat the process and scan your items again.\") amount_of_change = round(0.05*round(float(amount_tendered", "amount_tendered, thus indicating how much change is owed to the customer, or still", "loop represents the payment for the customer, it repeats until the full amount", "the full amount owed!\") display_totalbill() #If the customer pays more than the full", "scanned def calculate_subtotal(product_scanned): \"\"\" (str) -> int Returns the subtotal of the customer's", "program ends elif amount_of_change == 0: print (\"You've entered the full amount owed!\")", "much change is owed to the customer, or still owed to the MinMax", "shopping at MinMax! You have cancelled your order. 
If you'd like to try", "MinMax Store display_welcome() #This while loop represents the scanning input, the cashier will", "up to 0.10 >>> display_change(10.0,7.97) 2.05 >>> display_change(10.5,2.0) 8.50 >>> display_change(10.7,1.4) 9.30 \"\"\"", "to view print(\"Your subtotal so far is: $\", format(subtotal_before_tax,'.2f')) #Once the loop is", "will round up to 0.05. 0.06 to 0.07 will round down to 0.05.", "final total bill def display_totalbill(): # Returns a series of print functions for", "#!/usr/bin/env python # coding: utf-8 # In[2]: def square_root( sq_rand: float ) ->", "'242424': return 'LARGE' elif item_scanned == '0': return 'done' else: print (\"Oops! You", "to the bill, Total price before rounding to the nearest nickel, total price", "0.05. 0.08 to 0.09 will round up to 0.10 >>> calculate_total_bill(3.0) 3.40 >>>", "format(amount_tendered, '.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered, total_bill)) print(\"\\nThank you for shopping with MinMax!\") #The", "scanned based on the UPC of the item that has been scanned i.e.", "the bill is paid then thanks the customer and provides a final receipt", "the UPC of the item that has been scanned i.e. the \"item_scanned\" parameter", "captured as the amount tendered by the customer i.e. the 'amount_tendered' variable >>>", "a running total, which updates each time an item is scanned def calculate_subtotal(product_scanned):", "will continue to accumulate and show on the screen for them to view", "them to view print(\"Your subtotal so far is: $\", format(subtotal_before_tax,'.2f')) #Once the loop", "'.2f') #This function displays the final total bill def display_totalbill(): # Returns a", "Returns string 'Welcome to MinMax' >>> display_welcome() 'Welcome to MinMax!' \"\"\" print(\"Welcome to", "items again. 
\"\"\" amount_tendered = input(\"Using the total displayed, please pay the complete", "\"\"\" (str) -> str Returns the type of item that has been scanned", "an input HST_RATE variable in this function is multiplied by inputted variable Function", "sq_rand - ret_val*ret_val while abs(diff) > 0.000001: diff = sq_rand - ret_val*ret_val ret_val", "MinMax!\") #The main function starts here if __name__ == \"__main__\": #Sets the values", "abs(diff) > 0.000001: diff = sq_rand - ret_val*ret_val ret_val = ret_val + (diff", "get_barcode(item_scanned): \"\"\" (str) -> str Returns the type of item that has been", "19 \"\"\" subtotal_before_tax = 0 PRICE_SINGLE = 1 PRICE_SMALL = 5 PRICE_LARGE =", "the complete amount owed via cash only. If you'd like to cancel this", "is displayed, change is given to the customer and the program ends. elif", "'666666': return 'SMALL' elif item_scanned == '242424': return 'LARGE' elif item_scanned == '0':", "is scanned def calculate_subtotal(product_scanned): \"\"\" (str) -> int Returns the subtotal of the", "to the nearest 5 cents using the nickel rounding scheme mentioned already total_bill", "while amount_of_change < 0: amount_tendered = float(get_amount_tendered()) #If customer enters 0, the the", "Codes (UPC) per product to scan def get_barcode(item_scanned): \"\"\" (str) -> str Returns", "total bill, the receipt of the total bill is displayed and the program", "amount of the bill is paid then thanks the customer and provides a", "which includes: subtotal before tax, HST added to the bill, Total price before", "the process and scan your items again. 
\"\"\" amount_tendered = input(\"Using the total", "the customer pays more than the full amount owed on the total bill,", ") -> float: ret_val = sq_rand diff = sq_rand - ret_val*ret_val while abs(diff)", "total bill def display_totalbill(): # Returns a series of print functions for the", "2.05 >>> display_change(10.5,2.0) 8.50 >>> display_change(10.7,1.4) 9.30 \"\"\" difference = abs(total_bill-amount_tendered) return (format(difference,", "to 0.09 will round up to 0.10 >>> display_change(10.0,7.97) 2.05 >>> display_change(10.5,2.0) 8.50", "ret_val*ret_val while abs(diff) > 0.000001: diff = sq_rand - ret_val*ret_val ret_val = ret_val", "before rounding to the nearest nickel, total price after rounding, payment from the", "your bill! \\nSubtotal: $\", format(subtotal_before_tax, '.2f')) print(\"HST: $\", format(0.13 * subtotal_before_tax, '.2f')) print(\"Total", "1 >>> calculate_subtotal('666666') 5 >>> calculate_subtotal('242424') 19 \"\"\" subtotal_before_tax = 0 PRICE_SINGLE =", "(diff / 2.0) return ret_val # In[3]: square_root(144) # In[4]: def display_welcome(): \"\"\"", "by inputted variable Function returns the resulting variable \"total\", rounded and formatted to", "-> float: ret_val = sq_rand diff = sq_rand - ret_val*ret_val while abs(diff) >", "enter the full amount of $\",total_bill) #If the customer pays the full amount", "payment for the customer, it repeats until the full amount of the bill", "taxes, you owe: $\",format(round(0.05 * round((total_bill) / 0.05), 2), '.2f')) #Sets the value", "displayed, please pay the complete amount owed via cash only. If you'd like", "much the customer gives i.e. input(\"enter your stuff\") def get_amount_tendered(): \"\"\" Returns either", "PRICE_LARGE return subtotal_before_tax #This function gets how much the customer gives i.e. 
input(\"enter", ">>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424') 'LARGE' \"\"\" if item_scanned == '111111': return 'SINGLES'", "'SMALL' >>>get_barcode('242424') 'LARGE' \"\"\" if item_scanned == '111111': return 'SINGLES' elif item_scanned ==", "the resulting variable \"total\", rounded and formatted to 2 decimal points. Variable \"total\"", "rounded and formatted to 2 decimal points. Variable \"total\" is then rounded to", "\"\"\" Returns string 'Welcome to MinMax' >>> display_welcome() 'Welcome to MinMax!' \"\"\" print(\"Welcome", "your item\") #This function calculates the subtotal as a running total, which updates", "other value, this is captured as the amount tendered by the customer i.e.", "UPC_LARGE = '242424' if product_scanned == UPC_SINGLE: subtotal_before_tax +=PRICE_SINGLE elif product_scanned == UPC_SMALL:", "and a farewell greeting #All values returned are displayed with two decimal points", "0) while get_barcode(item)!= 'done': item = input(\"Scan your items that you would like", "#This function is to set values for Universal Price Codes (UPC) per product", "cashier hits '0', the program is ended due to cancellation. If the customer", "5 PRICE_LARGE = 19 UPC_SINGLE = '111111' UPC_SMALL = '666666' UPC_LARGE = '242424'", "ret_val + (diff / 2.0) return ret_val # In[3]: square_root(144) # In[4]: def", "given to the customer amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #This while loop represents", "by using the price of the item that has been scanned (i.e. the", "\"\"\" Returns either the end of this program or the value of the", "display_welcome(): \"\"\" Returns string 'Welcome to MinMax' >>> display_welcome() 'Welcome to MinMax!' \"\"\"", "UPC_LARGE: subtotal_before_tax += PRICE_LARGE return subtotal_before_tax #This function gets how much the customer", "item that has been scanned i.e. 
the \"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL'", "repeat the process and scan your items again. \"\"\" amount_tendered = input(\"Using the", "50 >>>get_amount_tendered(0) Thanks for shopping at MinMax! You have cancelled your order. If", "$\", format(subtotal_before_tax,'.2f')) #Once the loop is over and 0 has been pressed (because", "value of the amount tendered by the customer by using an input prompt.", "the program is ended due to cancellation. If the customer provides any other", "# In[3]: square_root(144) # In[4]: def display_welcome(): \"\"\" Returns string 'Welcome to MinMax'", "price ('total_bill') after HST is shown to the customer #'total_bill' is rounded to", "ret_val = ret_val + (diff / 2.0) return ret_val # In[3]: square_root(144) #", "customer , any change owed to the customer and a farewell greeting #All", "# Returns a series of print functions for the total bill which includes:", "= sq_rand - ret_val*ret_val ret_val = ret_val + (diff / 2.0) return ret_val", "values for Universal Price Codes (UPC) per product to scan def get_barcode(item_scanned): \"\"\"", "get_amount_tendered(): \"\"\" Returns either the end of this program or the value of", "to return as a float with two decimal points, including zeroes (i.e. 10.50", "the receipt of the total bill is displayed and the program ends elif", "will continue to scan items until he is done (i.e. hits 0) while", "you'd like to cancel this purchase, just hit 0 again.\") if amount_tendered ==", "has been scanned (i.e. the parameter \"product_scanned\")which is determined by using the UPC_SINGLE,", "function is multiplied by inputted variable Function returns the resulting variable \"total\", rounded", "will round down to 0.05. 0.08 to 0.09 will round up to 0.10", "are displayed with two decimal points in the format of $0.00 print(\"\\nHere is", "to MinMax' >>> display_welcome() 'Welcome to MinMax!' 
\"\"\" print(\"Welcome to MinMax!\") #This function", "the total cost as a running total def calculate_total_bill(subtotal): \"\"\" (float) -> float", "calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE = 1.13 total_bill = subtotal *HST_RATE", "while loop represents the payment for the customer, it repeats until the full", "Code (UPC). Please enter an appropriate UPC for your item\") #This function calculates", "'.2f')) print(\"Total price after rounding: $\", format(total_bill, '.2f')) print(\"Payment: $\", format(amount_tendered, '.2f')) print(\"----------------\\nChange:", "calculate_total_bill(3.0) 3.40 >>> calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE = 1.13 total_bill", "the difference as the variable \"difference\" in value between total_bill and amount_tendered, thus", "In[2]: def square_root( sq_rand: float ) -> float: ret_val = sq_rand diff =", "customer and a farewell greeting #All values returned are displayed with two decimal", "change is given to the customer and the program ends. elif amount_of_change >", "UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif product_scanned == UPC_LARGE: subtotal_before_tax += PRICE_LARGE return subtotal_before_tax #This", "def square_root( sq_rand: float ) -> float: ret_val = sq_rand diff = sq_rand", "is then rounded to the nearest 5 cents using the following nickel rounding", "-> float subtotal is passed through as an input HST_RATE variable in this", "cancelled. The customer can repeat the process again by re-running the program. if", "(str) -> int Returns the subtotal of the customer's purchase before tax by", "been scanned (i.e. 
the parameter \"product_scanned\")which is determined by using the UPC_SINGLE, UPC_SMALL", "for the amount of change either owed by the customer or given to", "the 'amount_tendered' variable >>> get_amount_tendered(30) 30 >>> get_amount_tendered(40) 40 >>>get_amount_tendered(50) 50 >>>get_amount_tendered(0) Thanks", "a final receipt #All values returned are displayed with two decimal points in", "\"difference\" in value between total_bill and amount_tendered, thus indicating how much change is", "repeat the process again by re-running the program. if amount_tendered == 0: sys.exit(\"Thanks", "round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #This while loop represents the payment for the customer, it", "of the total bill, the customer is prompted to try again to pay", "You have cancelled your order. If you'd like to try again, please repeat", "UPC for your item\") #This function calculates the subtotal as a running total,", "again.\") if amount_tendered == 0: return \"end\" else: return amount_tendered #This function displays", "points in the format of $0.00 print(\"\\nHere is your bill! \\nSubtotal: $\", format(subtotal_before_tax,", "MinMax! You have cancelled your order. 
If you'd like to try again, please", "through as an input HST_RATE variable in this function is multiplied by inputted", "displays the welcome sign to the MinMax Store display_welcome() #This while loop represents", ">>> calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE = 1.13 total_bill = subtotal", "\") subtotal_before_tax += calculate_subtotal(item) # As the loop continues, the customer's subtotal so", "and UPC_LARGE variables >>> calculate_subtotal('111111') 1 >>> calculate_subtotal('666666') 5 >>> calculate_subtotal('242424') 19 \"\"\"", "and item to be used in the upcoming loops subtotal_before_tax = 0 amount_tendered", "following nickel rounding scheme standard rules in Canada: 0.01 to 0.02 will round", "and provides a final receipt #All values returned are displayed with two decimal", "PRICE_LARGE = 19 UPC_SINGLE = '111111' UPC_SMALL = '666666' UPC_LARGE = '242424' if", "by using an input prompt. If the cashier hits '0', the program is", "then rounded to the nearest 5 cents using the following nickel rounding scheme", "will round up to 0.10 >>> calculate_total_bill(3.0) 3.40 >>> calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05)", "__name__ == \"__main__\": #Sets the values of subtotal_before_tax and item to be used", "while abs(diff) > 0.000001: diff = sq_rand - ret_val*ret_val ret_val = ret_val +", "to 0.05. 0.08 to 0.09 will round up to 0.10 >>> calculate_total_bill(3.0) 3.40", "(float) -> float subtotal is passed through as an input HST_RATE variable in", "= round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #If the amount tendered by the customer is less", "the \"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424') 'LARGE' \"\"\" if item_scanned ==", "can repeat the process again by re-running the program. if amount_tendered == 0:", "'LARGE' elif item_scanned == '0': return 'done' else: print (\"Oops! 
You entered an", "Variable \"total\" is then rounded to the nearest 5 cents using the following", "an unrecognized Universal Price Code (UPC). Please enter an appropriate UPC for your", "rules in Canada: 0.01 to 0.02 will round down to 0.00. 0. 03", "owed on the total bill, the receipt of the total bill is displayed", "elif amount_of_change > 0: print (\"\\nHere is your change!: $\",display_change(amount_tendered,total_bill)) display_totalbill() # In[", "Returns a series of print functions for the total bill which includes: subtotal", ">>> get_amount_tendered(30) 30 >>> get_amount_tendered(40) 40 >>>get_amount_tendered(50) 50 >>>get_amount_tendered(0) Thanks for shopping at", "to pay full amount if amount_of_change < 0: print(\"Sorry about that! You are", "#Sets the values of subtotal_before_tax and item to be used in the upcoming", "scheme mentioned already total_bill = float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter taxes, you owe: $\",format(round(0.05 * round((total_bill)", "< 0: print(\"Sorry about that! You are short by: $\",format(abs(amount_of_change),'.2f'),\"Please try again and", "to cancel this purchase, just hit 0 again.\") if amount_tendered == 0: return", "input(\"Using the total displayed, please pay the complete amount owed via cash only.", "function calculates the total cost as a running total def calculate_total_bill(subtotal): \"\"\" (float)", "amount_of_change < 0: print(\"Sorry about that! 
You are short by: $\",format(abs(amount_of_change),'.2f'),\"Please try again", "per product to scan def get_barcode(item_scanned): \"\"\" (str) -> str Returns the type", "'242424' if product_scanned == UPC_SINGLE: subtotal_before_tax +=PRICE_SINGLE elif product_scanned == UPC_SMALL: subtotal_before_tax +=PRICE_SMALL", "'.2f')) #This function calculates the total cost as a running total def calculate_total_bill(subtotal):", "before tax, HST added to the bill, Total price before rounding to the", "price before rounding: $\", format(subtotal_before_tax * 1.13, '.2f')) print(\"Total price after rounding: $\",", "UPC_SMALL and UPC_LARGE variables >>> calculate_subtotal('111111') 1 >>> calculate_subtotal('666666') 5 >>> calculate_subtotal('242424') 19", "coding: utf-8 # In[2]: def square_root( sq_rand: float ) -> float: ret_val =", "if item_scanned == '111111': return 'SINGLES' elif item_scanned == '666666': return 'SMALL' elif", "accumulate and show on the screen for them to view print(\"Your subtotal so", "If you'd like to try again, please repeat the process and scan your", "the value of the amount tendered by the customer by using an input", "item\") #This function calculates the subtotal as a running total, which updates each", "1.13, '.2f')) print(\"Total price after rounding: $\", format(total_bill, '.2f')) print(\"Payment: $\", format(amount_tendered, '.2f'))", "bill is displayed and the program ends elif amount_of_change == 0: print (\"You've", "just hit 0 again.\") if amount_tendered == 0: return \"end\" else: return amount_tendered", "to the MinMax Store display_welcome() #This while loop represents the scanning input, the", "# In[4]: def display_welcome(): \"\"\" Returns string 'Welcome to MinMax' >>> display_welcome() 'Welcome", "decimal points. 
Variable \"total\" is then rounded to the nearest 5 cents using", "repeats until the full amount of the bill is paid then thanks the", "pays the full amount owed on the total bill, the receipt of the", "up to 0.10 >>> calculate_total_bill(3.0) 3.40 >>> calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05) 2.30 \"\"\"", "(because 0 means done), the total price ('total_bill') after HST is shown to", "two decimal points in the format of $0.00 print(\"\\nHere is your bill! \\nSubtotal:", "please repeat the process and scan your items again. \"\"\" amount_tendered = input(\"Using", "that! You are short by: $\",format(abs(amount_of_change),'.2f'),\"Please try again and enter the full amount", "will round down to 0.00. 0. 03 to 0.04 will round up to", "'SINGLES' elif item_scanned == '666666': return 'SMALL' elif item_scanned == '242424': return 'LARGE'", "HST_RATE variable in this function is multiplied by inputted variable Function returns the", "value, this is captured as the amount tendered by the customer i.e. the", "UPC_SINGLE, UPC_SMALL and UPC_LARGE variables >>> calculate_subtotal('111111') 1 >>> calculate_subtotal('666666') 5 >>> calculate_subtotal('242424')", "format(total_bill, '.2f')) print(\"Payment: $\", format(amount_tendered, '.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered, total_bill)) print(\"\\nThank you for", "function displays the final total bill def display_totalbill(): # Returns a series of", "with two decimal points in the format of $0.00 while amount_of_change < 0:", "str Returns the type of item that has been scanned based on the", "MinMax store. The variable \"difference\" is formatted to return as a float with", "order. If you'd like to try again, please repeat the process and scan", "variable \"difference\" in value between total_bill and amount_tendered, thus indicating how much change", "at MinMax! You have cancelled your order. 
If you'd like to try again,", "less than the cost of the total bill, the customer is prompted to", "to try again, please repeat the process and scan your items again.\") amount_of_change", "i.e. the \"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424') 'LARGE' \"\"\" if item_scanned", "function is to set values for Universal Price Codes (UPC) per product to", ">>> calculate_subtotal('242424') 19 \"\"\" subtotal_before_tax = 0 PRICE_SINGLE = 1 PRICE_SMALL = 5", "customer def display_change(total_bill,amount_tendered): \"\"\" (float,float) -> float Returns the difference as the variable", "= input(\"Scan your items that you would like to purchase here, hit 0", "been scanned i.e. the \"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424') 'LARGE' \"\"\"", "enter an appropriate UPC for your item\") #This function calculates the subtotal as", "diff = sq_rand - ret_val*ret_val ret_val = ret_val + (diff / 2.0) return", "including zeroes (i.e. 10.50 instead of 10.5). \"difference\" is then rounded to the", "thanks the customer and provides a final receipt #All values returned are displayed", "to try again, please repeat the process and scan your items again. \"\"\"", "how much the customer gives i.e. input(\"enter your stuff\") def get_amount_tendered(): \"\"\" Returns", "again by re-running the program. if amount_tendered == 0: sys.exit(\"Thanks for shopping at", "elif amount_of_change == 0: print (\"You've entered the full amount owed!\") display_totalbill() #If", "scanned i.e. the \"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424') 'LARGE' \"\"\" if", "$0.00 print(\"\\nHere is your bill! \\nSubtotal: $\", format(subtotal_before_tax, '.2f')) print(\"HST: $\", format(0.13 *", "0.07 will round down to 0.05. 0.08 to 0.09 will round up to", "the item that has been scanned i.e. 
the \"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666')", "'.2f')) print(\"HST: $\", format(0.13 * subtotal_before_tax, '.2f')) print(\"Total price before rounding: $\", format(subtotal_before_tax", "is your bill! \\nSubtotal: $\", format(subtotal_before_tax, '.2f')) print(\"HST: $\", format(0.13 * subtotal_before_tax, '.2f'))", "the total bill is displayed and the program ends elif amount_of_change == 0:", "series of print functions for the total bill which includes: subtotal before tax,", "down to 0.05. 0.08 to 0.09 will round up to 0.10 >>> calculate_total_bill(3.0)", "subtotal_before_tax += PRICE_LARGE return subtotal_before_tax #This function gets how much the customer gives", "bill which includes: subtotal before tax, HST added to the bill, Total price", "amount_of_change < 0: amount_tendered = float(get_amount_tendered()) #If customer enters 0, the the order", "is formatted to return as a float with two decimal points, including zeroes", "is given to the customer and the program ends. elif amount_of_change > 0:", "purchase before tax by using the price of the item that has been", "subtotal as a running total, which updates each time an item is scanned", "that has been scanned (i.e. the parameter \"product_scanned\")which is determined by using the", "amount tendered by the customer i.e. the 'amount_tendered' variable >>> get_amount_tendered(30) 30 >>>", "def display_change(total_bill,amount_tendered): \"\"\" (float,float) -> float Returns the difference as the variable \"difference\"", "Total price before rounding to the nearest nickel, total price after rounding, payment", "or given to the customer amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #This while loop", "the order is cancelled. 
The customer can repeat the process again by re-running", "As the loop continues, the customer's subtotal so far will continue to accumulate", "round up to 0.10 >>> calculate_total_bill(3.0) 3.40 >>> calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05) 2.30", "entered an unrecognized Universal Price Code (UPC). Please enter an appropriate UPC for", "the process and scan your items again.\") amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #If", "given to the customer def display_change(total_bill,amount_tendered): \"\"\" (float,float) -> float Returns the difference", "loop represents the scanning input, the cashier will continue to scan items until", "total, which updates each time an item is scanned def calculate_subtotal(product_scanned): \"\"\" (str)", "subtotal_before_tax += calculate_subtotal(item) # As the loop continues, the customer's subtotal so far", "any other value, this is captured as the amount tendered by the customer", "* round(float(total_bill)/0.05), 2), '.2f') #This function displays the final total bill def display_totalbill():", "about that! 
You are short by: $\",format(abs(amount_of_change),'.2f'),\"Please try again and enter the full", "final receipt #All values returned are displayed with two decimal points in the", "product_scanned == UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif product_scanned == UPC_LARGE: subtotal_before_tax += PRICE_LARGE return", "1.13 total_bill = subtotal *HST_RATE return format(round(0.05 * round(float(total_bill)/0.05), 2), '.2f') #This function", "which updates each time an item is scanned def calculate_subtotal(product_scanned): \"\"\" (str) ->", "like to cancel this purchase, just hit 0 again.\") if amount_tendered == 0:", "amount_tendered == 0: return \"end\" else: return amount_tendered #This function displays the change", "0 again.\") if amount_tendered == 0: return \"end\" else: return amount_tendered #This function", "value for the amount of change either owed by the customer or given", "a series of print functions for the total bill which includes: subtotal before", "values of subtotal_before_tax and item to be used in the upcoming loops subtotal_before_tax", "item that has been scanned (i.e. the parameter \"product_scanned\")which is determined by using", "10.50 instead of 10.5). \"difference\" is then rounded to the nearest 5 cents", "and scan your items again. \"\"\" amount_tendered = input(\"Using the total displayed, please", "== \"__main__\": #Sets the values of subtotal_before_tax and item to be used in", "and scan your items again.\") amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #If the amount", "owed to the MinMax store. The variable \"difference\" is formatted to return as", "hit 0 again.\") if amount_tendered == 0: return \"end\" else: return amount_tendered #This", "is owed to the customer, or still owed to the MinMax store. The", "0.05. 0.06 to 0.07 will round down to 0.05. 0.08 to 0.09 will", "bill, the customer is prompted to try again to pay full amount if", "zeroes (i.e. 10.50 instead of 10.5). 
\"difference\" is then rounded to the nearest", "ret_val # In[3]: square_root(144) # In[4]: def display_welcome(): \"\"\" Returns string 'Welcome to", "#All values returned are displayed with two decimal points in the format of", "is: $\", format(subtotal_before_tax,'.2f')) #Once the loop is over and 0 has been pressed", "amount of change either owed by the customer or given to the customer", "items again.\") amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #If the amount tendered by the", "10.5). \"difference\" is then rounded to the nearest 5 cents using the following", "formatted to return as a float with two decimal points, including zeroes (i.e.", "using the price of the item that has been scanned (i.e. the parameter", "total displayed, please pay the complete amount owed via cash only. If you'd", "try again, please repeat the process and scan your items again. \"\"\" amount_tendered", "= input(\"Using the total displayed, please pay the complete amount owed via cash", "int Returns the subtotal of the customer's purchase before tax by using the", "subtotal_before_tax = 0 amount_tendered = 0 item = True #This displays the welcome", "unrecognized Universal Price Code (UPC). Please enter an appropriate UPC for your item\")", "format(0.13 * subtotal_before_tax, '.2f')) print(\"Total price before rounding: $\", format(subtotal_before_tax * 1.13, '.2f'))", "rounding scheme standard rules in Canada: 0.01 to 0.02 will round down to", "values returned are displayed with two decimal points in the format of $0.00", "bill, the receipt of the total bill is displayed and the program ends", "when you're ready to finish up! \") subtotal_before_tax += calculate_subtotal(item) # As the", "#Once the loop is over and 0 has been pressed (because 0 means", "will round up to 0.10 >>> display_change(10.0,7.97) 2.05 >>> display_change(10.5,2.0) 8.50 >>> display_change(10.7,1.4)", "by the customer by using an input prompt. 
If the cashier hits '0',", "while get_barcode(item)!= 'done': item = input(\"Scan your items that you would like to", "by: $\",format(abs(amount_of_change),'.2f'),\"Please try again and enter the full amount of $\",total_bill) #If the", "0 item = True #This displays the welcome sign to the MinMax Store", "bill is paid then thanks the customer and provides a final receipt #All", "#If the amount tendered by the customer is less than the cost of", "total bill, the receipt of the total bil is displayed, change is given", "your items again. \"\"\" amount_tendered = input(\"Using the total displayed, please pay the", "and enter the full amount of $\",total_bill) #If the customer pays the full", "If the cashier hits '0', the program is ended due to cancellation. If", "= 0 PRICE_SINGLE = 1 PRICE_SMALL = 5 PRICE_LARGE = 19 UPC_SINGLE =", "please repeat the process and scan your items again.\") amount_of_change = round(0.05*round(float(amount_tendered -", "<gh_stars>1-10 #!/usr/bin/env python # coding: utf-8 # In[2]: def square_root( sq_rand: float )", "program or the value of the amount tendered by the customer by using", "customer's subtotal so far will continue to accumulate and show on the screen", "item is scanned def calculate_subtotal(product_scanned): \"\"\" (str) -> int Returns the subtotal of", "to 0.05. 0.08 to 0.09 will round up to 0.10 >>> display_change(10.0,7.97) 2.05", "loop continues, the customer's subtotal so far will continue to accumulate and show", "the MinMax store. The variable \"difference\" is formatted to return as a float", "total price after rounding, payment from the customer , any change owed to", "the format of $0.00 while amount_of_change < 0: amount_tendered = float(get_amount_tendered()) #If customer", "amount_tendered = float(get_amount_tendered()) #If customer enters 0, the the order is cancelled. 
The", "amount_tendered #This function displays the change given to the customer def display_change(total_bill,amount_tendered): \"\"\"", "variable \"difference\" is formatted to return as a float with two decimal points,", "'.2f')) print(\"Payment: $\", format(amount_tendered, '.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered, total_bill)) print(\"\\nThank you for shopping", "subtotal so far is: $\", format(subtotal_before_tax,'.2f')) #Once the loop is over and 0", "the program. if amount_tendered == 0: sys.exit(\"Thanks for shopping at MinMax! You have", "displays the change given to the customer def display_change(total_bill,amount_tendered): \"\"\" (float,float) -> float", "scan your items again.\") amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #If the amount tendered", "+ (diff / 2.0) return ret_val # In[3]: square_root(144) # In[4]: def display_welcome():", "abs(total_bill-amount_tendered) return (format(difference, '.2f')) #This function calculates the total cost as a running", "total cost as a running total def calculate_total_bill(subtotal): \"\"\" (float) -> float subtotal", "again. \"\"\" amount_tendered = input(\"Using the total displayed, please pay the complete amount", "'done' else: print (\"Oops! You entered an unrecognized Universal Price Code (UPC). Please", "(i.e. hits 0) while get_barcode(item)!= 'done': item = input(\"Scan your items that you", "your order. If you'd like to try again, please repeat the process and", "with two decimal points, including zeroes (i.e. 10.50 instead of 10.5). \"difference\" is", "if amount_of_change < 0: print(\"Sorry about that! 
You are short by: $\",format(abs(amount_of_change),'.2f'),\"Please try", "customer, it repeats until the full amount of the bill is paid then", "of the bill is paid then thanks the customer and provides a final", "amount_of_change > 0: print (\"\\nHere is your change!: $\",display_change(amount_tendered,total_bill)) display_totalbill() # In[ ]:", "5 >>> calculate_subtotal('242424') 19 \"\"\" subtotal_before_tax = 0 PRICE_SINGLE = 1 PRICE_SMALL =", "Price Codes (UPC) per product to scan def get_barcode(item_scanned): \"\"\" (str) -> str", "== UPC_SINGLE: subtotal_before_tax +=PRICE_SINGLE elif product_scanned == UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif product_scanned ==", "far will continue to accumulate and show on the screen for them to", "0 has been pressed (because 0 means done), the total price ('total_bill') after", "0.01 to 0.02 will round down to 0.00. 0. 03 to 0.04 will", "the following nickel rounding scheme standard rules in Canada: 0.01 to 0.02 will", "subtotal *HST_RATE return format(round(0.05 * round(float(total_bill)/0.05), 2), '.2f') #This function displays the final", "is prompted to try again to pay full amount if amount_of_change < 0:", "input prompt. If the cashier hits '0', the program is ended due to", "customer, or still owed to the MinMax store. The variable \"difference\" is formatted", "for them to view print(\"Your subtotal so far is: $\", format(subtotal_before_tax,'.2f')) #Once the", "= 19 UPC_SINGLE = '111111' UPC_SMALL = '666666' UPC_LARGE = '242424' if product_scanned", "$\", format(amount_tendered, '.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered, total_bill)) print(\"\\nThank you for shopping with MinMax!\")", "print functions for the total bill which includes: subtotal before tax, HST added", "elif item_scanned == '0': return 'done' else: print (\"Oops! You entered an unrecognized", "here, hit 0 when you're ready to finish up! 
\") subtotal_before_tax += calculate_subtotal(item)", "is shown to the customer #'total_bill' is rounded to the nearest 5 cents", "customer by using an input prompt. If the cashier hits '0', the program", "== UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif product_scanned == UPC_LARGE: subtotal_before_tax += PRICE_LARGE return subtotal_before_tax", "again and enter the full amount of $\",total_bill) #If the customer pays the", "the full amount owed on the total bill, the receipt of the total", "(format(difference, '.2f')) #This function calculates the total cost as a running total def", "as the amount tendered by the customer i.e. the 'amount_tendered' variable >>> get_amount_tendered(30)", "displayed, change is given to the customer and the program ends. elif amount_of_change", "ret_val*ret_val ret_val = ret_val + (diff / 2.0) return ret_val # In[3]: square_root(144)", "0.04 will round up to 0.05. 0.06 to 0.07 will round down to", "format(round(0.05 * round(float(total_bill)/0.05), 2), '.2f') #This function displays the final total bill def", "round down to 0.05. 0.08 to 0.09 will round up to 0.10 >>>", "subtotal_before_tax, '.2f')) print(\"Total price before rounding: $\", format(subtotal_before_tax * 1.13, '.2f')) print(\"Total price", "Thanks for shopping at MinMax! You have cancelled your order. If you'd like", "\"\"\" difference = abs(total_bill-amount_tendered) return (format(difference, '.2f')) #This function calculates the total cost", "and show on the screen for them to view print(\"Your subtotal so far", "using the nickel rounding scheme mentioned already total_bill = float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter taxes, you", "scanning input, the cashier will continue to scan items until he is done", "#This function gets how much the customer gives i.e. input(\"enter your stuff\") def", ">>> display_welcome() 'Welcome to MinMax!' 
\"\"\" print(\"Welcome to MinMax!\") #This function is to", "format(subtotal_before_tax * 1.13, '.2f')) print(\"Total price after rounding: $\", format(total_bill, '.2f')) print(\"Payment: $\",", "calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE = 1.13 total_bill = subtotal *HST_RATE return format(round(0.05 *", "receipt of the total bil is displayed, change is given to the customer", "to the customer #'total_bill' is rounded to the nearest 5 cents using the", "scheme standard rules in Canada: 0.01 to 0.02 will round down to 0.00.", "than the cost of the total bill, the customer is prompted to try", "decimal points, including zeroes (i.e. 10.50 instead of 10.5). \"difference\" is then rounded", "of this program or the value of the amount tendered by the customer", "the total bil is displayed, change is given to the customer and the", "round up to 0.05. 0.06 to 0.07 will round down to 0.05. 0.08", "continues, the customer's subtotal so far will continue to accumulate and show on", "amount if amount_of_change < 0: print(\"Sorry about that! You are short by: $\",format(abs(amount_of_change),'.2f'),\"Please", "== '666666': return 'SMALL' elif item_scanned == '242424': return 'LARGE' elif item_scanned ==", "5 cents using the nickel rounding scheme mentioned already total_bill = float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter", "round up to 0.10 >>> display_change(10.0,7.97) 2.05 >>> display_change(10.5,2.0) 8.50 >>> display_change(10.7,1.4) 9.30", "item_scanned == '242424': return 'LARGE' elif item_scanned == '0': return 'done' else: print", "by the customer or given to the customer amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2)", "has been pressed (because 0 means done), the total price ('total_bill') after HST", "scan items until he is done (i.e. 
hits 0) while get_barcode(item)!= 'done': item", "owed by the customer or given to the customer amount_of_change = round(0.05*round(float(amount_tendered -", "nickel rounding scheme mentioned already total_bill = float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter taxes, you owe: $\",format(round(0.05", "'Welcome to MinMax' >>> display_welcome() 'Welcome to MinMax!' \"\"\" print(\"Welcome to MinMax!\") #This", "float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter taxes, you owe: $\",format(round(0.05 * round((total_bill) / 0.05), 2), '.2f')) #Sets", "utf-8 # In[2]: def square_root( sq_rand: float ) -> float: ret_val = sq_rand", "thus indicating how much change is owed to the customer, or still owed", "amount_tendered = 0 item = True #This displays the welcome sign to the", "the customer i.e. the 'amount_tendered' variable >>> get_amount_tendered(30) 30 >>> get_amount_tendered(40) 40 >>>get_amount_tendered(50)", "In[3]: square_root(144) # In[4]: def display_welcome(): \"\"\" Returns string 'Welcome to MinMax' >>>", "bill, the receipt of the total bil is displayed, change is given to", "square_root( sq_rand: float ) -> float: ret_val = sq_rand diff = sq_rand -", "1 PRICE_SMALL = 5 PRICE_LARGE = 19 UPC_SINGLE = '111111' UPC_SMALL = '666666'", "or still owed to the MinMax store. 
The variable \"difference\" is formatted to", "subtotal_before_tax +=PRICE_SMALL elif product_scanned == UPC_LARGE: subtotal_before_tax += PRICE_LARGE return subtotal_before_tax #This function", "The variable \"difference\" is formatted to return as a float with two decimal", "the screen for them to view print(\"Your subtotal so far is: $\", format(subtotal_before_tax,'.2f'))", "to 0.09 will round up to 0.10 >>> calculate_total_bill(3.0) 3.40 >>> calculate_total_bill(6.67) 7.55", "so far will continue to accumulate and show on the screen for them", "the amount tendered by the customer is less than the cost of the", "pays more than the full amount owed on the total bill, the receipt", "rounding, payment from the customer , any change owed to the customer and", "You are short by: $\",format(abs(amount_of_change),'.2f'),\"Please try again and enter the full amount of", "function displays the change given to the customer def display_change(total_bill,amount_tendered): \"\"\" (float,float) ->", "an appropriate UPC for your item\") #This function calculates the subtotal as a", "on the total bill, the receipt of the total bil is displayed, change", "print (\"Oops! You entered an unrecognized Universal Price Code (UPC). Please enter an", "and 0 has been pressed (because 0 means done), the total price ('total_bill')", "ends elif amount_of_change == 0: print (\"You've entered the full amount owed!\") display_totalbill()", "\"\"\" subtotal_before_tax = 0 PRICE_SINGLE = 1 PRICE_SMALL = 5 PRICE_LARGE = 19", "0.10 >>> display_change(10.0,7.97) 2.05 >>> display_change(10.5,2.0) 8.50 >>> display_change(10.7,1.4) 9.30 \"\"\" difference =", "format of $0.00 print(\"\\nHere is your bill! \\nSubtotal: $\", format(subtotal_before_tax, '.2f')) print(\"HST: $\",", "rounded to the nearest 5 cents using the nickel rounding scheme mentioned already", "bil is displayed, change is given to the customer and the program ends.", "scanned (i.e. 
the parameter \"product_scanned\")which is determined by using the UPC_SINGLE, UPC_SMALL and", "* 1.13, '.2f')) print(\"Total price after rounding: $\", format(total_bill, '.2f')) print(\"Payment: $\", format(amount_tendered,", "and formatted to 2 decimal points. Variable \"total\" is then rounded to the", "float: ret_val = sq_rand diff = sq_rand - ret_val*ret_val while abs(diff) > 0.000001:", "print(\"Your subtotal so far is: $\", format(subtotal_before_tax,'.2f')) #Once the loop is over and", "two decimal points, including zeroes (i.e. 10.50 instead of 10.5). \"difference\" is then", "$\",format(abs(amount_of_change),'.2f'),\"Please try again and enter the full amount of $\",total_bill) #If the customer", "each time an item is scanned def calculate_subtotal(product_scanned): \"\"\" (str) -> int Returns", "and the program ends elif amount_of_change == 0: print (\"You've entered the full", "he is done (i.e. hits 0) while get_barcode(item)!= 'done': item = input(\"Scan your", "variable \"total\", rounded and formatted to 2 decimal points. Variable \"total\" is then", "nearest 5 cents using the nickel rounding scheme mentioned already total_bill = float(calculate_total_bill(subtotal_before_tax))", "0.08 to 0.09 will round up to 0.10 >>> calculate_total_bill(3.0) 3.40 >>> calculate_total_bill(6.67)", "ready to finish up! \") subtotal_before_tax += calculate_subtotal(item) # As the loop continues,", "calculate_subtotal(item) # As the loop continues, the customer's subtotal so far will continue", "the customer def display_change(total_bill,amount_tendered): \"\"\" (float,float) -> float Returns the difference as the", "or the value of the amount tendered by the customer by using an", "displayed and the program ends elif amount_of_change == 0: print (\"You've entered the", "Price Code (UPC). 
#This function maps each Universal Price Code (UPC) to its item category
def get_barcode(item_scanned):
    """ (str) -> str
    Translate a scanned UPC into the category it stands for; '0' maps to
    'done' (the cashier's end-of-order signal).  Any other code prints a
    warning and yields None, exactly as the original if/elif chain did.
    >>> get_barcode('111111')
    'SINGLES'
    >>> get_barcode('666666')
    'SMALL'
    >>> get_barcode('242424')
    'LARGE'
    """
    categories = {
        '111111': 'SINGLES',
        '666666': 'SMALL',
        '242424': 'LARGE',
        '0': 'done',
    }
    if item_scanned in categories:
        return categories[item_scanned]
    # Unrecognized code: warn the cashier; implicit None return preserved.
    print ("Oops! You entered an unrecognized Universal Price Code (UPC). Please enter an appropriate UPC for your item")
input(\"enter your stuff\") def get_amount_tendered(): \"\"\" Returns either the end", "+= calculate_subtotal(item) # As the loop continues, the customer's subtotal so far will", "float ) -> float: ret_val = sq_rand diff = sq_rand - ret_val*ret_val while", "before tax by using the price of the item that has been scanned", "if product_scanned == UPC_SINGLE: subtotal_before_tax +=PRICE_SINGLE elif product_scanned == UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif", "= abs(total_bill-amount_tendered) return (format(difference, '.2f')) #This function calculates the total cost as a", "(UPC) per product to scan def get_barcode(item_scanned): \"\"\" (str) -> str Returns the", "to the customer def display_change(total_bill,amount_tendered): \"\"\" (float,float) -> float Returns the difference as", "gives i.e. input(\"enter your stuff\") def get_amount_tendered(): \"\"\" Returns either the end of", "using the following nickel rounding scheme standard rules in Canada: 0.01 to 0.02", "total def calculate_total_bill(subtotal): \"\"\" (float) -> float subtotal is passed through as an", "greeting #All values returned are displayed with two decimal points in the format", "HST added to the bill, Total price before rounding to the nearest nickel,", "0: sys.exit(\"Thanks for shopping at MinMax! You have cancelled your order. If you'd", ">>>get_amount_tendered(0) Thanks for shopping at MinMax! You have cancelled your order. If you'd", "for shopping with MinMax!\") #The main function starts here if __name__ == \"__main__\":", "\\nSubtotal: $\", format(subtotal_before_tax, '.2f')) print(\"HST: $\", format(0.13 * subtotal_before_tax, '.2f')) print(\"Total price before", "= 1.13 total_bill = subtotal *HST_RATE return format(round(0.05 * round(float(total_bill)/0.05), 2), '.2f') #This", "the customer's purchase before tax by using the price of the item that", "0.05. 
0.08 to 0.09 will round up to 0.10 >>> display_change(10.0,7.97) 2.05 >>>", "the amount tendered by the customer by using an input prompt. If the", "the MinMax Store display_welcome() #This while loop represents the scanning input, the cashier", "- total_bill)/0.05),2) #If the amount tendered by the customer is less than the", "in the upcoming loops subtotal_before_tax = 0 amount_tendered = 0 item = True", "of the total bill is displayed and the program ends elif amount_of_change ==", "#If the customer pays more than the full amount owed on the total", "'111111': return 'SINGLES' elif item_scanned == '666666': return 'SMALL' elif item_scanned == '242424':", "calculate_subtotal(product_scanned): \"\"\" (str) -> int Returns the subtotal of the customer's purchase before", "the nearest nickel, total price after rounding, payment from the customer , any", "the nearest 5 cents using the following nickel rounding scheme standard rules in", "- total_bill)/0.05),2) #This while loop represents the payment for the customer, it repeats", "(i.e. the parameter \"product_scanned\")which is determined by using the UPC_SINGLE, UPC_SMALL and UPC_LARGE", "\"\"\" print(\"Welcome to MinMax!\") #This function is to set values for Universal Price", "+=PRICE_SMALL elif product_scanned == UPC_LARGE: subtotal_before_tax += PRICE_LARGE return subtotal_before_tax #This function gets", "\"difference\" is formatted to return as a float with two decimal points, including", "2.30 \"\"\" HST_RATE = 1.13 total_bill = subtotal *HST_RATE return format(round(0.05 * round(float(total_bill)/0.05),", "0: return \"end\" else: return amount_tendered #This function displays the change given to", ">>>get_amount_tendered(50) 50 >>>get_amount_tendered(0) Thanks for shopping at MinMax! You have cancelled your order.", "customer can repeat the process again by re-running the program. 
#The main script: greet, scan items, then collect payment and print a receipt
if __name__ == "__main__":
    # Running totals used by the scan and payment loops below.
    subtotal_before_tax = 0
    amount_tendered = 0
    item = True

    # Greet the customer at the MinMax store.
    display_welcome()

    # Scan loop: keep reading UPCs until the cashier enters 0 ('done').
    # NOTE(review): the first get_barcode(True) call prints the "unrecognized
    # UPC" warning before any scan happens; preserved from the original.
    while get_barcode(item) != 'done':
        item = input("Scan your items that you would like to purchase here, hit 0 when you're ready to finish up! ")
        subtotal_before_tax += calculate_subtotal(item)
        # Show the accumulating subtotal after every scan.
        print("Your subtotal so far is: $", format(subtotal_before_tax,'.2f'))

    # Scanning done: show the HST-inclusive total, nickel-rounded.
    total_bill = float(calculate_total_bill(subtotal_before_tax))
    print("\nAfter taxes, you owe: $",format(round(0.05 * round((total_bill) / 0.05), 2), '.2f'))

    # Negative amount_of_change means the customer still owes money.
    amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2)

    # Payment loop: repeat until the bill is fully paid (or cancelled).
    while amount_of_change < 0:
        amount_tendered = float(get_amount_tendered())
        # Entering 0 cancels the whole order.
        if amount_tendered == 0:
            sys.exit("Thanks for shopping at MinMax! You have cancelled your order. If you'd like to try again, please repeat the process and scan your items again.")
        amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2)
        # Short payment: report the shortfall and loop again.
        if amount_of_change < 0:
            print("Sorry about that! You are short by: $",format(abs(amount_of_change),'.2f'),"Please try again and enter the full amount of $",total_bill)
        # Exact payment: print the receipt, no change owed.
        elif amount_of_change == 0:
            print ("You've entered the full amount owed!")
            display_totalbill()
        # Overpayment: hand back change, then print the receipt.
        elif amount_of_change > 0:
            print ("\nHere is your change!: $",display_change(amount_tendered,total_bill))
            display_totalbill()
Variable \"total\" is then rounded to the nearest 5", "customer pays the full amount owed on the total bill, the receipt of", "sq_rand: float ) -> float: ret_val = sq_rand diff = sq_rand - ret_val*ret_val", "the total displayed, please pay the complete amount owed via cash only. If", "either owed by the customer or given to the customer amount_of_change = round(0.05*round(float(amount_tendered", "is ended due to cancellation. If the customer provides any other value, this", "-> float Returns the difference as the variable \"difference\" in value between total_bill", "The customer can repeat the process again by re-running the program. if amount_tendered", "$\", format(total_bill, '.2f')) print(\"Payment: $\", format(amount_tendered, '.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered, total_bill)) print(\"\\nThank you", "input, the cashier will continue to scan items until he is done (i.e.", "on the screen for them to view print(\"Your subtotal so far is: $\",", "in the format of $0.00 print(\"\\nHere is your bill! \\nSubtotal: $\", format(subtotal_before_tax, '.2f'))", "> 0.000001: diff = sq_rand - ret_val*ret_val ret_val = ret_val + (diff /", "mentioned already total_bill = float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter taxes, you owe: $\",format(round(0.05 * round((total_bill) /", "total_bill = subtotal *HST_RATE return format(round(0.05 * round(float(total_bill)/0.05), 2), '.2f') #This function displays", "to purchase here, hit 0 when you're ready to finish up! \") subtotal_before_tax", "of item that has been scanned based on the UPC of the item", "while loop represents the scanning input, the cashier will continue to scan items", "UPC_SINGLE: subtotal_before_tax +=PRICE_SINGLE elif product_scanned == UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif product_scanned == UPC_LARGE:", "instead of 10.5). 
\"difference\" is then rounded to the nearest 5 cents using", "after rounding, payment from the customer , any change owed to the customer", "any change owed to the customer and a farewell greeting #All values returned", "decimal points in the format of $0.00 while amount_of_change < 0: amount_tendered =", "for Universal Price Codes (UPC) per product to scan def get_barcode(item_scanned): \"\"\" (str)", "to scan def get_barcode(item_scanned): \"\"\" (str) -> str Returns the type of item", "#If customer enters 0, the the order is cancelled. The customer can repeat", "the total price ('total_bill') after HST is shown to the customer #'total_bill' is", "payment from the customer , any change owed to the customer and a", "Returns the subtotal of the customer's purchase before tax by using the price", "== UPC_LARGE: subtotal_before_tax += PRICE_LARGE return subtotal_before_tax #This function gets how much the", "In[4]: def display_welcome(): \"\"\" Returns string 'Welcome to MinMax' >>> display_welcome() 'Welcome to", "as an input HST_RATE variable in this function is multiplied by inputted variable", "the customer , any change owed to the customer and a farewell greeting", "already total_bill = float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter taxes, you owe: $\",format(round(0.05 * round((total_bill) / 0.05),", "customer #'total_bill' is rounded to the nearest 5 cents using the nickel rounding", "view print(\"Your subtotal so far is: $\", format(subtotal_before_tax,'.2f')) #Once the loop is over", "product_scanned == UPC_SINGLE: subtotal_before_tax +=PRICE_SINGLE elif product_scanned == UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif product_scanned", "a float with two decimal points, including zeroes (i.e. 
10.50 instead of 10.5).", "the parameter \"product_scanned\")which is determined by using the UPC_SINGLE, UPC_SMALL and UPC_LARGE variables", "display_change(10.0,7.97) 2.05 >>> display_change(10.5,2.0) 8.50 >>> display_change(10.7,1.4) 9.30 \"\"\" difference = abs(total_bill-amount_tendered) return", "$\", format(0.13 * subtotal_before_tax, '.2f')) print(\"Total price before rounding: $\", format(subtotal_before_tax * 1.13,", "continue to accumulate and show on the screen for them to view print(\"Your", "#This function displays the change given to the customer def display_change(total_bill,amount_tendered): \"\"\" (float,float)", "receipt of the total bill is displayed and the program ends elif amount_of_change", "0: amount_tendered = float(get_amount_tendered()) #If customer enters 0, the the order is cancelled.", "input(\"enter your stuff\") def get_amount_tendered(): \"\"\" Returns either the end of this program", "of the item that has been scanned (i.e. the parameter \"product_scanned\")which is determined", "price after rounding: $\", format(total_bill, '.2f')) print(\"Payment: $\", format(amount_tendered, '.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered,", "to the customer and a farewell greeting #All values returned are displayed with", "float(get_amount_tendered()) #If customer enters 0, the the order is cancelled. The customer can", "prompted to try again to pay full amount if amount_of_change < 0: print(\"Sorry", "that has been scanned i.e. the \"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424')", "item_scanned == '666666': return 'SMALL' elif item_scanned == '242424': return 'LARGE' elif item_scanned", "finish up! 
\") subtotal_before_tax += calculate_subtotal(item) # As the loop continues, the customer's", "difference as the variable \"difference\" in value between total_bill and amount_tendered, thus indicating", "to try again to pay full amount if amount_of_change < 0: print(\"Sorry about", "the customer pays the full amount owed on the total bill, the receipt", "'Welcome to MinMax!' \"\"\" print(\"Welcome to MinMax!\") #This function is to set values", "rounded to the nearest 5 cents using the following nickel rounding scheme standard", "down to 0.00. 0. 03 to 0.04 will round up to 0.05. 0.06", "3.40 >>> calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE = 1.13 total_bill =", "MinMax!\") #This function is to set values for Universal Price Codes (UPC) per", "is rounded to the nearest 5 cents using the nickel rounding scheme mentioned", "represents the payment for the customer, it repeats until the full amount of", "more than the full amount owed on the total bill, the receipt of", "the value for the amount of change either owed by the customer or", "than the full amount owed on the total bill, the receipt of the", "the total bill, the receipt of the total bil is displayed, change is", "\"\"\" (str) -> int Returns the subtotal of the customer's purchase before tax", "tendered by the customer by using an input prompt. If the cashier hits", "starts here if __name__ == \"__main__\": #Sets the values of subtotal_before_tax and item", "the customer is prompted to try again to pay full amount if amount_of_change", "float with two decimal points, including zeroes (i.e. 10.50 instead of 10.5). 
\"difference\"", "-> str Returns the type of item that has been scanned based on", "total bill which includes: subtotal before tax, HST added to the bill, Total", "change either owed by the customer or given to the customer amount_of_change =", "= 1 PRICE_SMALL = 5 PRICE_LARGE = 19 UPC_SINGLE = '111111' UPC_SMALL =", "difference = abs(total_bill-amount_tendered) return (format(difference, '.2f')) #This function calculates the total cost as", "is to set values for Universal Price Codes (UPC) per product to scan", "up! \") subtotal_before_tax += calculate_subtotal(item) # As the loop continues, the customer's subtotal", "\"\"\" amount_tendered = input(\"Using the total displayed, please pay the complete amount owed", "due to cancellation. If the customer provides any other value, this is captured", "done (i.e. hits 0) while get_barcode(item)!= 'done': item = input(\"Scan your items that", "returned are displayed with two decimal points in the format of $0.00 while", "return subtotal_before_tax #This function gets how much the customer gives i.e. input(\"enter your", "as a running total def calculate_total_bill(subtotal): \"\"\" (float) -> float subtotal is passed", "= 5 PRICE_LARGE = 19 UPC_SINGLE = '111111' UPC_SMALL = '666666' UPC_LARGE =", "the total bill which includes: subtotal before tax, HST added to the bill,", "if __name__ == \"__main__\": #Sets the values of subtotal_before_tax and item to be", "== 0: sys.exit(\"Thanks for shopping at MinMax! You have cancelled your order. If", "owed!\") display_totalbill() #If the customer pays more than the full amount owed on", "else: return amount_tendered #This function displays the change given to the customer def", "your stuff\") def get_amount_tendered(): \"\"\" Returns either the end of this program or", "set values for Universal Price Codes (UPC) per product to scan def get_barcode(item_scanned):", "points, including zeroes (i.e. 10.50 instead of 10.5). 
\"difference\" is then rounded to", "bill def display_totalbill(): # Returns a series of print functions for the total", "amount tendered by the customer is less than the cost of the total", "\"\"\" HST_RATE = 1.13 total_bill = subtotal *HST_RATE return format(round(0.05 * round(float(total_bill)/0.05), 2),", "try again, please repeat the process and scan your items again.\") amount_of_change =", "rounding to the nearest nickel, total price after rounding, payment from the customer", "PRICE_SINGLE = 1 PRICE_SMALL = 5 PRICE_LARGE = 19 UPC_SINGLE = '111111' UPC_SMALL", "\"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424') 'LARGE' \"\"\" if item_scanned == '111111':", "function gets how much the customer gives i.e. input(\"enter your stuff\") def get_amount_tendered():", "be used in the upcoming loops subtotal_before_tax = 0 amount_tendered = 0 item", "total price ('total_bill') after HST is shown to the customer #'total_bill' is rounded", "to be used in the upcoming loops subtotal_before_tax = 0 amount_tendered = 0", "input(\"Scan your items that you would like to purchase here, hit 0 when", "are displayed with two decimal points in the format of $0.00 while amount_of_change", "tax by using the price of the item that has been scanned (i.e.", "ended due to cancellation. If the customer provides any other value, this is", "'LARGE' \"\"\" if item_scanned == '111111': return 'SINGLES' elif item_scanned == '666666': return", "print(\"Payment: $\", format(amount_tendered, '.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered, total_bill)) print(\"\\nThank you for shopping with", "to MinMax!' 
\"\"\" print(\"Welcome to MinMax!\") #This function is to set values for", "*HST_RATE return format(round(0.05 * round(float(total_bill)/0.05), 2), '.2f') #This function displays the final total", "\"__main__\": #Sets the values of subtotal_before_tax and item to be used in the", "hits 0) while get_barcode(item)!= 'done': item = input(\"Scan your items that you would", "return as a float with two decimal points, including zeroes (i.e. 10.50 instead", "else: print (\"Oops! You entered an unrecognized Universal Price Code (UPC). Please enter", "with two decimal points in the format of $0.00 print(\"\\nHere is your bill!", "round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #If the amount tendered by the customer is less than", "indicating how much change is owed to the customer, or still owed to", "7.55 >>> calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE = 1.13 total_bill = subtotal *HST_RATE return", "for the total bill which includes: subtotal before tax, HST added to the", "after rounding: $\", format(total_bill, '.2f')) print(\"Payment: $\", format(amount_tendered, '.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered, total_bill))", "scan def get_barcode(item_scanned): \"\"\" (str) -> str Returns the type of item that", "change given to the customer def display_change(total_bill,amount_tendered): \"\"\" (float,float) -> float Returns the", "value between total_bill and amount_tendered, thus indicating how much change is owed to", "has been scanned i.e. the \"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424') 'LARGE'", "purchase here, hit 0 when you're ready to finish up! \") subtotal_before_tax +=", "prompt. 
If the cashier hits '0', the program is ended due to cancellation.", "$\",format(round(0.05 * round((total_bill) / 0.05), 2), '.2f')) #Sets the value for the amount", "nickel, total price after rounding, payment from the customer , any change owed", "19 UPC_SINGLE = '111111' UPC_SMALL = '666666' UPC_LARGE = '242424' if product_scanned ==", "subtotal_before_tax +=PRICE_SINGLE elif product_scanned == UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif product_scanned == UPC_LARGE: subtotal_before_tax", "the customer by using an input prompt. If the cashier hits '0', the", "\"end\" else: return amount_tendered #This function displays the change given to the customer", "display_change(10.5,2.0) 8.50 >>> display_change(10.7,1.4) 9.30 \"\"\" difference = abs(total_bill-amount_tendered) return (format(difference, '.2f')) #This", "price before rounding to the nearest nickel, total price after rounding, payment from", "the customer, or still owed to the MinMax store. The variable \"difference\" is", "variable in this function is multiplied by inputted variable Function returns the resulting", "would like to purchase here, hit 0 when you're ready to finish up!", "amount owed on the total bill, the receipt of the total bill is", "is paid then thanks the customer and provides a final receipt #All values", "amount owed on the total bill, the receipt of the total bil is", "tendered by the customer is less than the cost of the total bill,", "this program or the value of the amount tendered by the customer by", "running total def calculate_total_bill(subtotal): \"\"\" (float) -> float subtotal is passed through as", "$\", display_change(amount_tendered, total_bill)) print(\"\\nThank you for shopping with MinMax!\") #The main function starts", "2), '.2f')) #Sets the value for the amount of change either owed by", "to the MinMax store. 
The variable \"difference\" is formatted to return as a", "to 0.10 >>> calculate_total_bill(3.0) 3.40 >>> calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE", "return 'SINGLES' elif item_scanned == '666666': return 'SMALL' elif item_scanned == '242424': return", "cents using the following nickel rounding scheme standard rules in Canada: 0.01 to", "the item that has been scanned (i.e. the parameter \"product_scanned\")which is determined by", "print(\"\\nHere is your bill! \\nSubtotal: $\", format(subtotal_before_tax, '.2f')) print(\"HST: $\", format(0.13 * subtotal_before_tax,", "you would like to purchase here, hit 0 when you're ready to finish", "running total, which updates each time an item is scanned def calculate_subtotal(product_scanned): \"\"\"", "Returns the difference as the variable \"difference\" in value between total_bill and amount_tendered,", "added to the bill, Total price before rounding to the nearest nickel, total", "program is ended due to cancellation. If the customer provides any other value,", "of the total bil is displayed, change is given to the customer and", "the UPC_SINGLE, UPC_SMALL and UPC_LARGE variables >>> calculate_subtotal('111111') 1 >>> calculate_subtotal('666666') 5 >>>", "MinMax' >>> display_welcome() 'Welcome to MinMax!' \"\"\" print(\"Welcome to MinMax!\") #This function is", "calculate_subtotal('111111') 1 >>> calculate_subtotal('666666') 5 >>> calculate_subtotal('242424') 19 \"\"\" subtotal_before_tax = 0 PRICE_SINGLE", "by re-running the program. 
if amount_tendered == 0: sys.exit(\"Thanks for shopping at MinMax!", "(\"You've entered the full amount owed!\") display_totalbill() #If the customer pays more than", "full amount owed!\") display_totalbill() #If the customer pays more than the full amount", "round(float(total_bill)/0.05), 2), '.2f') #This function displays the final total bill def display_totalbill(): #", "of $0.00 while amount_of_change < 0: amount_tendered = float(get_amount_tendered()) #If customer enters 0,", "return format(round(0.05 * round(float(total_bill)/0.05), 2), '.2f') #This function displays the final total bill", "of 10.5). \"difference\" is then rounded to the nearest 5 cents using the", "change is owed to the customer, or still owed to the MinMax store.", "- ret_val*ret_val ret_val = ret_val + (diff / 2.0) return ret_val # In[3]:", "format(subtotal_before_tax, '.2f')) print(\"HST: $\", format(0.13 * subtotal_before_tax, '.2f')) print(\"Total price before rounding: $\",", "5 cents using the following nickel rounding scheme standard rules in Canada: 0.01", "total_bill)/0.05),2) #This while loop represents the payment for the customer, it repeats until", "the variable \"difference\" in value between total_bill and amount_tendered, thus indicating how much", "parameter >>>get_barcode('111111') 'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424') 'LARGE' \"\"\" if item_scanned == '111111': return", "gets how much the customer gives i.e. input(\"enter your stuff\") def get_amount_tendered(): \"\"\"", "def display_totalbill(): # Returns a series of print functions for the total bill", "items that you would like to purchase here, hit 0 when you're ready", "display_totalbill(): # Returns a series of print functions for the total bill which", "calculate_subtotal('666666') 5 >>> calculate_subtotal('242424') 19 \"\"\" subtotal_before_tax = 0 PRICE_SINGLE = 1 PRICE_SMALL", "process and scan your items again. 
\"\"\" amount_tendered = input(\"Using the total displayed,", "up to 0.05. 0.06 to 0.07 will round down to 0.05. 0.08 to", "short by: $\",format(abs(amount_of_change),'.2f'),\"Please try again and enter the full amount of $\",total_bill) #If", "using the UPC_SINGLE, UPC_SMALL and UPC_LARGE variables >>> calculate_subtotal('111111') 1 >>> calculate_subtotal('666666') 5", "stuff\") def get_amount_tendered(): \"\"\" Returns either the end of this program or the", "cost as a running total def calculate_total_bill(subtotal): \"\"\" (float) -> float subtotal is", "to the nearest nickel, total price after rounding, payment from the customer ,", "round down to 0.00. 0. 03 to 0.04 will round up to 0.05.", "= round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #This while loop represents the payment for the customer,", "elif item_scanned == '242424': return 'LARGE' elif item_scanned == '0': return 'done' else:", ", any change owed to the customer and a farewell greeting #All values", "continue to scan items until he is done (i.e. hits 0) while get_barcode(item)!=", "the loop is over and 0 has been pressed (because 0 means done),", "the scanning input, the cashier will continue to scan items until he is", "either the end of this program or the value of the amount tendered", "the nearest 5 cents using the nickel rounding scheme mentioned already total_bill =", "the customer and the program ends. elif amount_of_change > 0: print (\"\\nHere is", "until he is done (i.e. hits 0) while get_barcode(item)!= 'done': item = input(\"Scan", "print(\"Sorry about that! You are short by: $\",format(abs(amount_of_change),'.2f'),\"Please try again and enter the", "is determined by using the UPC_SINGLE, UPC_SMALL and UPC_LARGE variables >>> calculate_subtotal('111111') 1", "item_scanned == '0': return 'done' else: print (\"Oops! You entered an unrecognized Universal", "0, the the order is cancelled. 
The customer can repeat the process again", "input HST_RATE variable in this function is multiplied by inputted variable Function returns", "please pay the complete amount owed via cash only. If you'd like to", "by the customer is less than the cost of the total bill, the", "hit 0 when you're ready to finish up! \") subtotal_before_tax += calculate_subtotal(item) #", "'SINGLES' >>>get_barcode('666666') 'SMALL' >>>get_barcode('242424') 'LARGE' \"\"\" if item_scanned == '111111': return 'SINGLES' elif", "0 means done), the total price ('total_bill') after HST is shown to the", "0.00. 0. 03 to 0.04 will round up to 0.05. 0.06 to 0.07", "'.2f')) print(\"----------------\\nChange: $\", display_change(amount_tendered, total_bill)) print(\"\\nThank you for shopping with MinMax!\") #The main", "amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #If the amount tendered by the customer is", "pay the complete amount owed via cash only. If you'd like to cancel", "in the format of $0.00 while amount_of_change < 0: amount_tendered = float(get_amount_tendered()) #If", "owed to the customer and a farewell greeting #All values returned are displayed", "in Canada: 0.01 to 0.02 will round down to 0.00. 0. 03 to", "show on the screen for them to view print(\"Your subtotal so far is:", "display_totalbill() #If the customer pays more than the full amount owed on the", "'.2f')) print(\"Total price before rounding: $\", format(subtotal_before_tax * 1.13, '.2f')) print(\"Total price after", "calculates the subtotal as a running total, which updates each time an item", "this is captured as the amount tendered by the customer i.e. 
the 'amount_tendered'", "to set values for Universal Price Codes (UPC) per product to scan def", "0 PRICE_SINGLE = 1 PRICE_SMALL = 5 PRICE_LARGE = 19 UPC_SINGLE = '111111'", "return amount_tendered #This function displays the change given to the customer def display_change(total_bill,amount_tendered):", "purchase, just hit 0 again.\") if amount_tendered == 0: return \"end\" else: return", "def calculate_total_bill(subtotal): \"\"\" (float) -> float subtotal is passed through as an input", "is passed through as an input HST_RATE variable in this function is multiplied", "subtotal_before_tax and item to be used in the upcoming loops subtotal_before_tax = 0", "the loop continues, the customer's subtotal so far will continue to accumulate and", "complete amount owed via cash only. If you'd like to cancel this purchase,", "elif product_scanned == UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif product_scanned == UPC_LARGE: subtotal_before_tax += PRICE_LARGE", "pay full amount if amount_of_change < 0: print(\"Sorry about that! You are short", "again.\") amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #If the amount tendered by the customer", "customer is less than the cost of the total bill, the customer is", "= sq_rand - ret_val*ret_val while abs(diff) > 0.000001: diff = sq_rand - ret_val*ret_val", "the process again by re-running the program. 
if amount_tendered == 0: sys.exit(\"Thanks for", "the bill, Total price before rounding to the nearest nickel, total price after", "print(\"\\nAfter taxes, you owe: $\",format(round(0.05 * round((total_bill) / 0.05), 2), '.2f')) #Sets the", "displayed with two decimal points in the format of $0.00 while amount_of_change <", "displayed with two decimal points in the format of $0.00 print(\"\\nHere is your", "'.2f')) #Sets the value for the amount of change either owed by the", "customer and provides a final receipt #All values returned are displayed with two", "price of the item that has been scanned (i.e. the parameter \"product_scanned\")which is", "def display_welcome(): \"\"\" Returns string 'Welcome to MinMax' >>> display_welcome() 'Welcome to MinMax!'", "rounding: $\", format(subtotal_before_tax * 1.13, '.2f')) print(\"Total price after rounding: $\", format(total_bill, '.2f'))", "i.e. input(\"enter your stuff\") def get_amount_tendered(): \"\"\" Returns either the end of this", "the full amount of $\",total_bill) #If the customer pays the full amount owed", "get_amount_tendered(30) 30 >>> get_amount_tendered(40) 40 >>>get_amount_tendered(50) 50 >>>get_amount_tendered(0) Thanks for shopping at MinMax!", "2.0) return ret_val # In[3]: square_root(144) # In[4]: def display_welcome(): \"\"\" Returns string", "cancelled your order. If you'd like to try again, please repeat the process", "the change given to the customer def display_change(total_bill,amount_tendered): \"\"\" (float,float) -> float Returns", "float subtotal is passed through as an input HST_RATE variable in this function", "item to be used in the upcoming loops subtotal_before_tax = 0 amount_tendered =", "the cashier will continue to scan items until he is done (i.e. 
hits", "return 'SMALL' elif item_scanned == '242424': return 'LARGE' elif item_scanned == '0': return", "the nickel rounding scheme mentioned already total_bill = float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter taxes, you owe:", "is displayed and the program ends elif amount_of_change == 0: print (\"You've entered", "40 >>>get_amount_tendered(50) 50 >>>get_amount_tendered(0) Thanks for shopping at MinMax! You have cancelled your", "with MinMax!\") #The main function starts here if __name__ == \"__main__\": #Sets the", "process and scan your items again.\") amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #If the", "full amount owed on the total bill, the receipt of the total bil", "owed to the customer, or still owed to the MinMax store. The variable", "bill, Total price before rounding to the nearest nickel, total price after rounding,", "\"\"\" (float,float) -> float Returns the difference as the variable \"difference\" in value", "the the order is cancelled. The customer can repeat the process again by", "customer and the program ends. elif amount_of_change > 0: print (\"\\nHere is your", "03 to 0.04 will round up to 0.05. 0.06 to 0.07 will round", "print(\"Welcome to MinMax!\") #This function is to set values for Universal Price Codes", "the amount tendered by the customer i.e. the 'amount_tendered' variable >>> get_amount_tendered(30) 30", "of the amount tendered by the customer by using an input prompt. If", "You entered an unrecognized Universal Price Code (UPC). Please enter an appropriate UPC", "#This function displays the final total bill def display_totalbill(): # Returns a series", "0.05), 2), '.2f')) #Sets the value for the amount of change either owed", "amount tendered by the customer by using an input prompt. 
If the cashier", "a running total def calculate_total_bill(subtotal): \"\"\" (float) -> float subtotal is passed through", "amount of $\",total_bill) #If the customer pays the full amount owed on the", "multiplied by inputted variable Function returns the resulting variable \"total\", rounded and formatted", "* round((total_bill) / 0.05), 2), '.2f')) #Sets the value for the amount of", "owed on the total bill, the receipt of the total bil is displayed,", "= '111111' UPC_SMALL = '666666' UPC_LARGE = '242424' if product_scanned == UPC_SINGLE: subtotal_before_tax", "#'total_bill' is rounded to the nearest 5 cents using the nickel rounding scheme", "the type of item that has been scanned based on the UPC of", "of $\",total_bill) #If the customer pays the full amount owed on the total", "print (\"You've entered the full amount owed!\") display_totalbill() #If the customer pays more", "end of this program or the value of the amount tendered by the", "loop is over and 0 has been pressed (because 0 means done), the", "by using the UPC_SINGLE, UPC_SMALL and UPC_LARGE variables >>> calculate_subtotal('111111') 1 >>> calculate_subtotal('666666')", "the customer amount_of_change = round(0.05*round(float(amount_tendered - total_bill)/0.05),2) #This while loop represents the payment", "the customer, it repeats until the full amount of the bill is paid", ">>> display_change(10.0,7.97) 2.05 >>> display_change(10.5,2.0) 8.50 >>> display_change(10.7,1.4) 9.30 \"\"\" difference = abs(total_bill-amount_tendered)", "display_change(total_bill,amount_tendered): \"\"\" (float,float) -> float Returns the difference as the variable \"difference\" in", "you owe: $\",format(round(0.05 * round((total_bill) / 0.05), 2), '.2f')) #Sets the value for", "and amount_tendered, thus indicating how much change is owed to the customer, or", "round((total_bill) / 0.05), 2), '.2f')) #Sets the value for the amount of change", "def get_barcode(item_scanned): \"\"\" (str) -> str Returns the type of item that 
has", "cash only. If you'd like to cancel this purchase, just hit 0 again.\")", "only. If you'd like to cancel this purchase, just hit 0 again.\") if", "0.09 will round up to 0.10 >>> display_change(10.0,7.97) 2.05 >>> display_change(10.5,2.0) 8.50 >>>", "0.10 >>> calculate_total_bill(3.0) 3.40 >>> calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE =", "means done), the total price ('total_bill') after HST is shown to the customer", "decimal points in the format of $0.00 print(\"\\nHere is your bill! \\nSubtotal: $\",", "0: print (\"You've entered the full amount owed!\") display_totalbill() #If the customer pays", "# coding: utf-8 # In[2]: def square_root( sq_rand: float ) -> float: ret_val", "to 0.04 will round up to 0.05. 0.06 to 0.07 will round down", "and the program ends. elif amount_of_change > 0: print (\"\\nHere is your change!:", "to MinMax!\") #This function is to set values for Universal Price Codes (UPC)", "Returns the type of item that has been scanned based on the UPC", "'amount_tendered' variable >>> get_amount_tendered(30) 30 >>> get_amount_tendered(40) 40 >>>get_amount_tendered(50) 50 >>>get_amount_tendered(0) Thanks for", "a farewell greeting #All values returned are displayed with two decimal points in", "owe: $\",format(round(0.05 * round((total_bill) / 0.05), 2), '.2f')) #Sets the value for the", "sign to the MinMax Store display_welcome() #This while loop represents the scanning input,", "subtotal before tax, HST added to the bill, Total price before rounding to", "# As the loop continues, the customer's subtotal so far will continue to", "entered the full amount owed!\") display_totalbill() #If the customer pays more than the", "item that has been scanned based on the UPC of the item that", "== '0': return 'done' else: print (\"Oops! 
You entered an unrecognized Universal Price", "an item is scanned def calculate_subtotal(product_scanned): \"\"\" (str) -> int Returns the subtotal", "UPC_LARGE variables >>> calculate_subtotal('111111') 1 >>> calculate_subtotal('666666') 5 >>> calculate_subtotal('242424') 19 \"\"\" subtotal_before_tax", "elif product_scanned == UPC_LARGE: subtotal_before_tax += PRICE_LARGE return subtotal_before_tax #This function gets how", "of $0.00 print(\"\\nHere is your bill! \\nSubtotal: $\", format(subtotal_before_tax, '.2f')) print(\"HST: $\", format(0.13", "print(\"Total price before rounding: $\", format(subtotal_before_tax * 1.13, '.2f')) print(\"Total price after rounding:", "full amount of $\",total_bill) #If the customer pays the full amount owed on", "== 0: return \"end\" else: return amount_tendered #This function displays the change given", "total bil is displayed, change is given to the customer and the program", "square_root(144) # In[4]: def display_welcome(): \"\"\" Returns string 'Welcome to MinMax' >>> display_welcome()", "two decimal points in the format of $0.00 while amount_of_change < 0: amount_tendered", "is less than the cost of the total bill, the customer is prompted", "given to the customer and the program ends. 
elif amount_of_change > 0: print", "receipt #All values returned are displayed with two decimal points in the format", "on the total bill, the receipt of the total bill is displayed and", "/ 2.0) return ret_val # In[3]: square_root(144) # In[4]: def display_welcome(): \"\"\" Returns", "format(subtotal_before_tax,'.2f')) #Once the loop is over and 0 has been pressed (because 0", "= float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter taxes, you owe: $\",format(round(0.05 * round((total_bill) / 0.05), 2), '.2f'))", "< 0: amount_tendered = float(get_amount_tendered()) #If customer enters 0, the the order is", "functions for the total bill which includes: subtotal before tax, HST added to", "product to scan def get_barcode(item_scanned): \"\"\" (str) -> str Returns the type of", "calculate_subtotal('242424') 19 \"\"\" subtotal_before_tax = 0 PRICE_SINGLE = 1 PRICE_SMALL = 5 PRICE_LARGE", "0. 03 to 0.04 will round up to 0.05. 0.06 to 0.07 will", ">>> calculate_total_bill(3.0) 3.40 >>> calculate_total_bill(6.67) 7.55 >>> calculate_total_bill(2.05) 2.30 \"\"\" HST_RATE = 1.13", "the values of subtotal_before_tax and item to be used in the upcoming loops", "of the item that has been scanned i.e. the \"item_scanned\" parameter >>>get_barcode('111111') 'SINGLES'", "represents the scanning input, the cashier will continue to scan items until he", "= '666666' UPC_LARGE = '242424' if product_scanned == UPC_SINGLE: subtotal_before_tax +=PRICE_SINGLE elif product_scanned", "includes: subtotal before tax, HST added to the bill, Total price before rounding", "far is: $\", format(subtotal_before_tax,'.2f')) #Once the loop is over and 0 has been", "Canada: 0.01 to 0.02 will round down to 0.00. 0. 
03 to 0.04", "full amount owed on the total bill, the receipt of the total bill", "the total bill, the customer is prompted to try again to pay full", "item = input(\"Scan your items that you would like to purchase here, hit", "you for shopping with MinMax!\") #The main function starts here if __name__ ==", "variable >>> get_amount_tendered(30) 30 >>> get_amount_tendered(40) 40 >>>get_amount_tendered(50) 50 >>>get_amount_tendered(0) Thanks for shopping", "print(\"\\nThank you for shopping with MinMax!\") #The main function starts here if __name__", "If the customer provides any other value, this is captured as the amount", "# In[2]: def square_root( sq_rand: float ) -> float: ret_val = sq_rand diff", "using an input prompt. If the cashier hits '0', the program is ended", "(str) -> str Returns the type of item that has been scanned based", "item = True #This displays the welcome sign to the MinMax Store display_welcome()", "returned are displayed with two decimal points in the format of $0.00 print(\"\\nHere", "= float(get_amount_tendered()) #If customer enters 0, the the order is cancelled. The customer", "get_barcode(item)!= 'done': item = input(\"Scan your items that you would like to purchase", "- ret_val*ret_val while abs(diff) > 0.000001: diff = sq_rand - ret_val*ret_val ret_val =", "\"\"\" (float) -> float subtotal is passed through as an input HST_RATE variable", "MinMax!' \"\"\" print(\"Welcome to MinMax!\") #This function is to set values for Universal", "process again by re-running the program. if amount_tendered == 0: sys.exit(\"Thanks for shopping", "(\"Oops! You entered an unrecognized Universal Price Code (UPC). Please enter an appropriate", "to 0.07 will round down to 0.05. 
0.08 to 0.09 will round up", "nearest 5 cents using the following nickel rounding scheme standard rules in Canada:", "PRICE_SMALL = 5 PRICE_LARGE = 19 UPC_SINGLE = '111111' UPC_SMALL = '666666' UPC_LARGE", "between total_bill and amount_tendered, thus indicating how much change is owed to the", "change owed to the customer and a farewell greeting #All values returned are", "cashier will continue to scan items until he is done (i.e. hits 0)", "'done': item = input(\"Scan your items that you would like to purchase here,", "the customer and provides a final receipt #All values returned are displayed with", "cents using the nickel rounding scheme mentioned already total_bill = float(calculate_total_bill(subtotal_before_tax)) print(\"\\nAfter taxes,", "total bill is displayed and the program ends elif amount_of_change == 0: print", "return \"end\" else: return amount_tendered #This function displays the change given to the", "UPC of the item that has been scanned i.e. the \"item_scanned\" parameter >>>get_barcode('111111')", "* subtotal_before_tax, '.2f')) print(\"Total price before rounding: $\", format(subtotal_before_tax * 1.13, '.2f')) print(\"Total", "(UPC). Please enter an appropriate UPC for your item\") #This function calculates the", "sq_rand - ret_val*ret_val ret_val = ret_val + (diff / 2.0) return ret_val #", "to the nearest 5 cents using the following nickel rounding scheme standard rules", "total bill, the customer is prompted to try again to pay full amount", "== '242424': return 'LARGE' elif item_scanned == '0': return 'done' else: print (\"Oops!", "the cost of the total bill, the customer is prompted to try again", "0.000001: diff = sq_rand - ret_val*ret_val ret_val = ret_val + (diff / 2.0)", "store. 
The variable \"difference\" is formatted to return as a float with two", "+=PRICE_SINGLE elif product_scanned == UPC_SMALL: subtotal_before_tax +=PRICE_SMALL elif product_scanned == UPC_LARGE: subtotal_before_tax +=", "\"total\" is then rounded to the nearest 5 cents using the following nickel", "returns the resulting variable \"total\", rounded and formatted to 2 decimal points. Variable", "inputted variable Function returns the resulting variable \"total\", rounded and formatted to 2", "re-running the program. if amount_tendered == 0: sys.exit(\"Thanks for shopping at MinMax! You", "of subtotal_before_tax and item to be used in the upcoming loops subtotal_before_tax =", "the customer's subtotal so far will continue to accumulate and show on the", "via cash only. If you'd like to cancel this purchase, just hit 0", "the customer #'total_bill' is rounded to the nearest 5 cents using the nickel", "resulting variable \"total\", rounded and formatted to 2 decimal points. Variable \"total\" is", "tax, HST added to the bill, Total price before rounding to the nearest" ]
[ "self.weights = np.random.normal(0, 0.1, no_of_inputs + 1) self.datapoints = datapoints self.plotResult = list()", "+= 1 for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs) error_rate =", "/ float(len(training_inputs)) * 100.0 return _acc def plot(self): total_data = len(self.datapoints) with np.printoptions(precision=7,", "points {}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self): correct = 0 training_inputs, labels =", "error_rate * inputs self.weights[0] += self.learning_rate * error_rate self.plotResult.append(misclassified) if misclassified < self.bestResult:", "== 1) or (label == -1 and prediction == -1): correct += 1", "matplotlib.pyplot as plt import copy class PocketAlgorithm: def __init__(self, datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001):", "1 iteration = 0 while iteration < self.iteration: misclassified = 0 iteration +=", "learning_rate self.weights = np.random.normal(0, 0.1, no_of_inputs + 1) self.datapoints = datapoints self.plotResult =", "{}%\".format(((total_data - self.bestResult) / float(total_data)) * 100)) plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\")", "np.dot(inputs, self.weights[1:]) + self.weights[0] if summation > 0: activation = 1 else: activation", "After Final iteration: \", self.weights.transpose()) print(\"Best Weights of Pocket: \", self.bestWeights.transpose()) print(\"Best Accuracy", "Author: <NAME> <<EMAIL>> <NAME> <<EMAIL>> ''' import numpy as np import matplotlib.pyplot as", "Result: \", self.bestResult) print(\"Weight After Final iteration: \", self.weights.transpose()) print(\"Best Weights of Pocket:", "label == 1 else -1 if (label == 1 and prediction == -1)", "-1 if (label == 1 and prediction == -1) or (label == -1", "< self.bestResult: self.bestResult = misclassified self.bestWeights = copy.deepcopy(self.weights) if iteration % 500 ==", "\") 
print(\"Iteration {}, misclassified points {}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self): correct =", "return data if __name__ == '__main__': data_points = np.array(getInputData('classification.txt')) no_of_inputs = 3 pck", "Weights of Pocket: \", self.bestWeights.transpose()) print(\"Best Accuracy of Pocket: {}%\".format(((total_data - self.bestResult) /", "== -1 and prediction == 1): misclassified += 1 self.weights[1:] += self.learning_rate *", "if misclassified < self.bestResult: self.bestResult = misclassified self.bestWeights = copy.deepcopy(self.weights) if iteration %", "import numpy as np import matplotlib.pyplot as plt import copy class PocketAlgorithm: def", "error_rate = 1 if label == 1 else -1 if (label == 1", "self.plotResult.append(misclassified) if misclassified < self.bestResult: self.bestResult = misclassified self.bestWeights = copy.deepcopy(self.weights) if iteration", "while iteration < self.iteration: misclassified = 0 iteration += 1 for inputs, label", "predict(self, inputs): summation = np.dot(inputs, self.weights[1:]) + self.weights[0] if summation > 0: activation", "activation = 1 else: activation = -1 return activation def train(self): training_inputs, labels", "-1 and prediction == 1): misclassified += 1 self.weights[1:] += self.learning_rate * error_rate", "iteration % 500 == 0: print(\"Iteration {}, misclassified points {}, Evaluation {}%\".format(iteration, misclassified,", "+= 1 _acc = correct / float(len(training_inputs)) * 100.0 return _acc def plot(self):", "float(len(training_inputs)) * 100.0 return _acc def plot(self): total_data = len(self.datapoints) with np.printoptions(precision=7, suppress=True):", "Points\") plt.axis([0, self.iteration, 800, 1200]) plt.show() def getInputData(filename): data = np.genfromtxt(filename, delimiter=',') return", "plt.axis([0, self.iteration, 800, 1200]) plt.show() def getInputData(filename): data = np.genfromtxt(filename, 
delimiter=',') return data", "0 while iteration < self.iteration: misclassified = 0 iteration += 1 for inputs,", "plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration, 800, 1200]) plt.show() def getInputData(filename): data = np.genfromtxt(filename,", "misclassified points {}, Evaluation {}%\".format(iteration, misclassified, self.evaluate())) print(\"\") print(\"======== Result ========= \") print(\"Iteration", "self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration, 800, 1200]) plt.show() def getInputData(filename): data =", "misclassified = 0 iteration += 1 for inputs, label in zip(training_inputs, labels): prediction", "== -1) or (label == -1 and prediction == 1): misclassified += 1", "iteration self.learning_rate = learning_rate self.weights = np.random.normal(0, 0.1, no_of_inputs + 1) self.datapoints =", "iteration < self.iteration: misclassified = 0 iteration += 1 for inputs, label in", "= np.genfromtxt(filename, delimiter=',') return data if __name__ == '__main__': data_points = np.array(getInputData('classification.txt')) no_of_inputs", "print(\"Best Weights of Pocket: \", self.bestWeights.transpose()) print(\"Best Accuracy of Pocket: {}%\".format(((total_data - self.bestResult)", "{}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self): correct = 0 training_inputs, labels = self.datapoints[:,:-2],", "np.genfromtxt(filename, delimiter=',') return data if __name__ == '__main__': data_points = np.array(getInputData('classification.txt')) no_of_inputs =", "-1): correct += 1 _acc = correct / float(len(training_inputs)) * 100.0 return _acc", "Points/Best Result: \", self.bestResult) print(\"Weight After Final iteration: \", self.weights.transpose()) print(\"Best Weights of", "self.iteration = iteration self.learning_rate = learning_rate self.weights = np.random.normal(0, 0.1, no_of_inputs + 1)", "self.iteration), 
self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration, 800, 1200]) plt.show() def getInputData(filename): data", "self.predict(inputs) error_rate = 1 if label == 1 else -1 if (label ==", "= 0 while iteration < self.iteration: misclassified = 0 iteration += 1 for", "data_points = np.array(getInputData('classification.txt')) no_of_inputs = 3 pck = PocketAlgorithm(data_points, no_of_inputs) pck.train() pck.evaluate() pck.plot()", "print(\"Iteration {}, misclassified points {}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self): correct = 0", "labels): prediction = self.predict(inputs) error_rate = 1 if label == 1 else -1", "datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration = iteration self.learning_rate = learning_rate self.weights = np.random.normal(0,", "100.0 return _acc def plot(self): total_data = len(self.datapoints) with np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified", "def plot(self): total_data = len(self.datapoints) with np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified Points/Best Result: \",", "* inputs self.weights[0] += self.learning_rate * error_rate self.plotResult.append(misclassified) if misclassified < self.bestResult: self.bestResult", "\", self.bestResult) print(\"Weight After Final iteration: \", self.weights.transpose()) print(\"Best Weights of Pocket: \",", "100)) plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration, 800, 1200]) plt.show() def", "{}%\".format(self.evaluate())) def evaluate(self): correct = 0 training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] for inputs,", "np.random.normal(0, 0.1, no_of_inputs + 1) self.datapoints = datapoints self.plotResult = list() self.bestResult =", "return activation def train(self): training_inputs, 
labels = self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified = 1 iteration", "data if __name__ == '__main__': data_points = np.array(getInputData('classification.txt')) no_of_inputs = 3 pck =", "= self.predict(inputs) if (label == 1 and prediction == 1) or (label ==", "= correct / float(len(training_inputs)) * 100.0 return _acc def plot(self): total_data = len(self.datapoints)", "numpy as np import matplotlib.pyplot as plt import copy class PocketAlgorithm: def __init__(self,", "== -1 and prediction == -1): correct += 1 _acc = correct /", "if summation > 0: activation = 1 else: activation = -1 return activation", "= -1 return activation def train(self): training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified =", "or (label == -1 and prediction == 1): misclassified += 1 self.weights[1:] +=", "print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self): correct = 0 training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] for", "_acc def plot(self): total_data = len(self.datapoints) with np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified Points/Best Result:", "* error_rate self.plotResult.append(misclassified) if misclassified < self.bestResult: self.bestResult = misclassified self.bestWeights = copy.deepcopy(self.weights)", "learning_rate=0.0001): self.iteration = iteration self.learning_rate = learning_rate self.weights = np.random.normal(0, 0.1, no_of_inputs +", "= self.predict(inputs) error_rate = 1 if label == 1 else -1 if (label", "print(\"Iteration {}, misclassified points {}, Evaluation {}%\".format(iteration, misclassified, self.evaluate())) print(\"\") print(\"======== Result =========", "misclassified, self.evaluate())) print(\"\") print(\"======== Result ========= \") print(\"Iteration {}, misclassified points {}\".format(iteration, misclassified))", "labels): prediction = self.predict(inputs) if (label == 1 and prediction == 1) or", "inputs, label in 
zip(training_inputs, labels): prediction = self.predict(inputs) error_rate = 1 if label", "1) self.datapoints = datapoints self.plotResult = list() self.bestResult = float(\"inf\") self.bestWeights = np.array", "else -1 if (label == 1 and prediction == -1) or (label ==", "+ self.weights[0] if summation > 0: activation = 1 else: activation = -1", "self.datapoints[:,:-2], self.datapoints[:,-1:] for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs) if (label", "for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs) if (label == 1", "float(total_data)) * 100)) plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration, 800, 1200])", "misclassified += 1 self.weights[1:] += self.learning_rate * error_rate * inputs self.weights[0] += self.learning_rate", "{}, misclassified points {}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self): correct = 0 training_inputs,", "list() self.bestResult = float(\"inf\") self.bestWeights = np.array def predict(self, inputs): summation = np.dot(inputs,", "import copy class PocketAlgorithm: def __init__(self, datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration = iteration", "= 0 iteration += 1 for inputs, label in zip(training_inputs, labels): prediction =", "- self.bestResult) / float(total_data)) * 100)) plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\") plt.axis([0,", "summation = np.dot(inputs, self.weights[1:]) + self.weights[0] if summation > 0: activation = 1", "points {}, Evaluation {}%\".format(iteration, misclassified, self.evaluate())) print(\"\") print(\"======== Result ========= \") print(\"Iteration {},", "and prediction == -1) or (label == -1 and prediction == 1): misclassified", "print(\"======== Result ========= \") 
print(\"Iteration {}, misclassified points {}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def", "Misclassified Points/Best Result: \", self.bestResult) print(\"Weight After Final iteration: \", self.weights.transpose()) print(\"Best Weights", "if (label == 1 and prediction == -1) or (label == -1 and", "1 self.weights[1:] += self.learning_rate * error_rate * inputs self.weights[0] += self.learning_rate * error_rate", "inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs) if (label == 1 and", "print(\"\") print(\"======== Result ========= \") print(\"Iteration {}, misclassified points {}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate()))", "label in zip(training_inputs, labels): prediction = self.predict(inputs) if (label == 1 and prediction", "total_data = len(self.datapoints) with np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified Points/Best Result: \", self.bestResult) print(\"Weight", "= float(\"inf\") self.bestWeights = np.array def predict(self, inputs): summation = np.dot(inputs, self.weights[1:]) +", "== 1): misclassified += 1 self.weights[1:] += self.learning_rate * error_rate * inputs self.weights[0]", "misclassified self.bestWeights = copy.deepcopy(self.weights) if iteration % 500 == 0: print(\"Iteration {}, misclassified", "iteration = 0 while iteration < self.iteration: misclassified = 0 iteration += 1", "data = np.genfromtxt(filename, delimiter=',') return data if __name__ == '__main__': data_points = np.array(getInputData('classification.txt'))", "summation > 0: activation = 1 else: activation = -1 return activation def", "def predict(self, inputs): summation = np.dot(inputs, self.weights[1:]) + self.weights[0] if summation > 0:", "========= \") print(\"Iteration {}, misclassified points {}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self): correct", "def 
getInputData(filename): data = np.genfromtxt(filename, delimiter=',') return data if __name__ == '__main__': data_points", "if label == 1 else -1 if (label == 1 and prediction ==", "<<EMAIL>> ''' import numpy as np import matplotlib.pyplot as plt import copy class", "< self.iteration: misclassified = 0 iteration += 1 for inputs, label in zip(training_inputs,", "self.bestResult) print(\"Weight After Final iteration: \", self.weights.transpose()) print(\"Best Weights of Pocket: \", self.bestWeights.transpose())", "self.weights[0] += self.learning_rate * error_rate self.plotResult.append(misclassified) if misclassified < self.bestResult: self.bestResult = misclassified", "self.datapoints[:,-1:] for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs) if (label ==", "0 iteration += 1 for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs)", "activation def train(self): training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified = 1 iteration =", "__init__(self, datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration = iteration self.learning_rate = learning_rate self.weights =", "prediction = self.predict(inputs) error_rate = 1 if label == 1 else -1 if", "return _acc def plot(self): total_data = len(self.datapoints) with np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified Points/Best", "1 else -1 if (label == 1 and prediction == -1) or (label", "zip(training_inputs, labels): prediction = self.predict(inputs) error_rate = 1 if label == 1 else", "evaluate(self): correct = 0 training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] for inputs, label in", "0: activation = 1 else: activation = -1 return activation def train(self): training_inputs,", "if iteration % 500 == 0: print(\"Iteration {}, misclassified points {}, Evaluation {}%\".format(iteration,", "self.weights[1:]) + self.weights[0] if summation > 0: activation = 1 
else: activation =", "self.weights.transpose()) print(\"Best Weights of Pocket: \", self.bestWeights.transpose()) print(\"Best Accuracy of Pocket: {}%\".format(((total_data -", "{}, misclassified points {}, Evaluation {}%\".format(iteration, misclassified, self.evaluate())) print(\"\") print(\"======== Result ========= \")", "<reponame>appielife/Machine-Learning--Perceptron-Pocket-Linear-and-Logical-Regresstion-Algorithm<filename>PocketAlgorithm_v1.py ''' Author: <NAME> <<EMAIL>> <NAME> <<EMAIL>> ''' import numpy as np import", "plt import copy class PocketAlgorithm: def __init__(self, datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration =", "self.plotResult = list() self.bestResult = float(\"inf\") self.bestWeights = np.array def predict(self, inputs): summation", "prediction == 1): misclassified += 1 self.weights[1:] += self.learning_rate * error_rate * inputs", "def evaluate(self): correct = 0 training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] for inputs, label", "== 1 and prediction == 1) or (label == -1 and prediction ==", "copy.deepcopy(self.weights) if iteration % 500 == 0: print(\"Iteration {}, misclassified points {}, Evaluation", "= self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified = 1 iteration = 0 while iteration < self.iteration:", "iteration += 1 for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs) error_rate", "500 == 0: print(\"Iteration {}, misclassified points {}, Evaluation {}%\".format(iteration, misclassified, self.evaluate())) print(\"\")", "correct = 0 training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] for inputs, label in zip(training_inputs,", "print(\"Best Accuracy of Pocket: {}%\".format(((total_data - self.bestResult) / float(total_data)) * 100)) plt.plot(np.arange(0, self.iteration),", "activation = -1 return activation def train(self): training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified", 
"(label == 1 and prediction == -1) or (label == -1 and prediction", "1 and prediction == -1) or (label == -1 and prediction == 1):", "+= self.learning_rate * error_rate * inputs self.weights[0] += self.learning_rate * error_rate self.plotResult.append(misclassified) if", "0: print(\"Iteration {}, misclassified points {}, Evaluation {}%\".format(iteration, misclassified, self.evaluate())) print(\"\") print(\"======== Result", "= self.datapoints[:,:-2], self.datapoints[:,-1:] for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs) if", "{}, Evaluation {}%\".format(iteration, misclassified, self.evaluate())) print(\"\") print(\"======== Result ========= \") print(\"Iteration {}, misclassified", "len(self.datapoints) with np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified Points/Best Result: \", self.bestResult) print(\"Weight After Final", "as np import matplotlib.pyplot as plt import copy class PocketAlgorithm: def __init__(self, datapoints,", "misclassified points {}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self): correct = 0 training_inputs, labels", "if __name__ == '__main__': data_points = np.array(getInputData('classification.txt')) no_of_inputs = 3 pck = PocketAlgorithm(data_points,", "= 1 iteration = 0 while iteration < self.iteration: misclassified = 0 iteration", "label in zip(training_inputs, labels): prediction = self.predict(inputs) error_rate = 1 if label ==", "self.weights[1:] += self.learning_rate * error_rate * inputs self.weights[0] += self.learning_rate * error_rate self.plotResult.append(misclassified)", "= np.dot(inputs, self.weights[1:]) + self.weights[0] if summation > 0: activation = 1 else:", "plot(self): total_data = len(self.datapoints) with np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified Points/Best Result: \", self.bestResult)", "self.datapoints[:,-1:] misclassified = 1 iteration = 0 while iteration < 
self.iteration: misclassified =", "== 1 and prediction == -1) or (label == -1 and prediction ==", "correct / float(len(training_inputs)) * 100.0 return _acc def plot(self): total_data = len(self.datapoints) with", "plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration, 800, 1200]) plt.show() def getInputData(filename): data = np.genfromtxt(filename, delimiter=',')", "1): misclassified += 1 self.weights[1:] += self.learning_rate * error_rate * inputs self.weights[0] +=", "PocketAlgorithm: def __init__(self, datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration = iteration self.learning_rate = learning_rate", "''' Author: <NAME> <<EMAIL>> <NAME> <<EMAIL>> ''' import numpy as np import matplotlib.pyplot", "no_of_inputs + 1) self.datapoints = datapoints self.plotResult = list() self.bestResult = float(\"inf\") self.bestWeights", "1 and prediction == 1) or (label == -1 and prediction == -1):", "1 _acc = correct / float(len(training_inputs)) * 100.0 return _acc def plot(self): total_data", "= np.random.normal(0, 0.1, no_of_inputs + 1) self.datapoints = datapoints self.plotResult = list() self.bestResult", "if (label == 1 and prediction == 1) or (label == -1 and", "plt.show() def getInputData(filename): data = np.genfromtxt(filename, delimiter=',') return data if __name__ == '__main__':", "'__main__': data_points = np.array(getInputData('classification.txt')) no_of_inputs = 3 pck = PocketAlgorithm(data_points, no_of_inputs) pck.train() pck.evaluate()", "inputs): summation = np.dot(inputs, self.weights[1:]) + self.weights[0] if summation > 0: activation =", "= 0 training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] for inputs, label in zip(training_inputs, labels):", "with np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified Points/Best Result: \", self.bestResult) print(\"Weight After Final iteration:", "self.bestResult = misclassified self.bestWeights = copy.deepcopy(self.weights) if iteration 
% 500 == 0: print(\"Iteration", "> 0: activation = 1 else: activation = -1 return activation def train(self):", "misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self): correct = 0 training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:]", "iteration=7000, learning_rate=0.0001): self.iteration = iteration self.learning_rate = learning_rate self.weights = np.random.normal(0, 0.1, no_of_inputs", "in zip(training_inputs, labels): prediction = self.predict(inputs) error_rate = 1 if label == 1", "0 training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] for inputs, label in zip(training_inputs, labels): prediction", "self.learning_rate * error_rate * inputs self.weights[0] += self.learning_rate * error_rate self.plotResult.append(misclassified) if misclassified", "self.weights[0] if summation > 0: activation = 1 else: activation = -1 return", "error_rate self.plotResult.append(misclassified) if misclassified < self.bestResult: self.bestResult = misclassified self.bestWeights = copy.deepcopy(self.weights) if", "prediction == -1) or (label == -1 and prediction == 1): misclassified +=", "Evaluation {}%\".format(iteration, misclassified, self.evaluate())) print(\"\") print(\"======== Result ========= \") print(\"Iteration {}, misclassified points", "= iteration self.learning_rate = learning_rate self.weights = np.random.normal(0, 0.1, no_of_inputs + 1) self.datapoints", "correct += 1 _acc = correct / float(len(training_inputs)) * 100.0 return _acc def", "1) or (label == -1 and prediction == -1): correct += 1 _acc", "self.evaluate())) print(\"\") print(\"======== Result ========= \") print(\"Iteration {}, misclassified points {}\".format(iteration, misclassified)) print(\"Evaluation", "np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified Points/Best Result: \", self.bestResult) print(\"Weight After Final iteration: \",", "print(\"Weight After Final iteration: \", self.weights.transpose()) 
print(\"Best Weights of Pocket: \", self.bestWeights.transpose()) print(\"Best", "(label == -1 and prediction == 1): misclassified += 1 self.weights[1:] += self.learning_rate", "inputs self.weights[0] += self.learning_rate * error_rate self.plotResult.append(misclassified) if misclassified < self.bestResult: self.bestResult =", "Pocket: \", self.bestWeights.transpose()) print(\"Best Accuracy of Pocket: {}%\".format(((total_data - self.bestResult) / float(total_data)) *", "copy class PocketAlgorithm: def __init__(self, datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration = iteration self.learning_rate", "self.learning_rate = learning_rate self.weights = np.random.normal(0, 0.1, no_of_inputs + 1) self.datapoints = datapoints", "+ 1) self.datapoints = datapoints self.plotResult = list() self.bestResult = float(\"inf\") self.bestWeights =", "= copy.deepcopy(self.weights) if iteration % 500 == 0: print(\"Iteration {}, misclassified points {},", "self.predict(inputs) if (label == 1 and prediction == 1) or (label == -1", "class PocketAlgorithm: def __init__(self, datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration = iteration self.learning_rate =", "1200]) plt.show() def getInputData(filename): data = np.genfromtxt(filename, delimiter=',') return data if __name__ ==", "float(\"inf\") self.bestWeights = np.array def predict(self, inputs): summation = np.dot(inputs, self.weights[1:]) + self.weights[0]", "self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified = 1 iteration = 0 while iteration < self.iteration: misclassified", "_acc = correct / float(len(training_inputs)) * 100.0 return _acc def plot(self): total_data =", "Pocket: {}%\".format(((total_data - self.bestResult) / float(total_data)) * 100)) plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified", "plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\") 
plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration, 800, 1200]) plt.show() def getInputData(filename):", "self.bestResult = float(\"inf\") self.bestWeights = np.array def predict(self, inputs): summation = np.dot(inputs, self.weights[1:])", "= len(self.datapoints) with np.printoptions(precision=7, suppress=True): print(\"Minimum Misclassified Points/Best Result: \", self.bestResult) print(\"Weight After", "np.array def predict(self, inputs): summation = np.dot(inputs, self.weights[1:]) + self.weights[0] if summation >", "self.bestResult) / float(total_data)) * 100)) plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration,", "* 100)) plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration, 800, 1200]) plt.show()", "== 0: print(\"Iteration {}, misclassified points {}, Evaluation {}%\".format(iteration, misclassified, self.evaluate())) print(\"\") print(\"========", "prediction = self.predict(inputs) if (label == 1 and prediction == 1) or (label", "Result ========= \") print(\"Iteration {}, misclassified points {}\".format(iteration, misclassified)) print(\"Evaluation {}%\".format(self.evaluate())) def evaluate(self):", "<NAME> <<EMAIL>> ''' import numpy as np import matplotlib.pyplot as plt import copy", "else: activation = -1 return activation def train(self): training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:]", "self.bestWeights = copy.deepcopy(self.weights) if iteration % 500 == 0: print(\"Iteration {}, misclassified points", "suppress=True): print(\"Minimum Misclassified Points/Best Result: \", self.bestResult) print(\"Weight After Final iteration: \", self.weights.transpose())", "1 for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs) error_rate = 1", "training_inputs, labels = self.datapoints[:,:-2], 
self.datapoints[:,-1:] for inputs, label in zip(training_inputs, labels): prediction =", "self.iteration, 800, 1200]) plt.show() def getInputData(filename): data = np.genfromtxt(filename, delimiter=',') return data if", "self.bestWeights = np.array def predict(self, inputs): summation = np.dot(inputs, self.weights[1:]) + self.weights[0] if", "prediction == 1) or (label == -1 and prediction == -1): correct +=", "% 500 == 0: print(\"Iteration {}, misclassified points {}, Evaluation {}%\".format(iteration, misclassified, self.evaluate()))", "+= self.learning_rate * error_rate self.plotResult.append(misclassified) if misclassified < self.bestResult: self.bestResult = misclassified self.bestWeights", "= 1 if label == 1 else -1 if (label == 1 and", "{}%\".format(iteration, misclassified, self.evaluate())) print(\"\") print(\"======== Result ========= \") print(\"Iteration {}, misclassified points {}\".format(iteration,", "(label == 1 and prediction == 1) or (label == -1 and prediction", "as plt import copy class PocketAlgorithm: def __init__(self, datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration", "-1 and prediction == -1): correct += 1 _acc = correct / float(len(training_inputs))", "\", self.bestWeights.transpose()) print(\"Best Accuracy of Pocket: {}%\".format(((total_data - self.bestResult) / float(total_data)) * 100))", "800, 1200]) plt.show() def getInputData(filename): data = np.genfromtxt(filename, delimiter=',') return data if __name__", "-1) or (label == -1 and prediction == 1): misclassified += 1 self.weights[1:]", "no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration = iteration self.learning_rate = learning_rate self.weights = np.random.normal(0, 0.1,", "-1 return activation def train(self): training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified = 1", "= misclassified self.bestWeights = copy.deepcopy(self.weights) if iteration % 500 == 0: print(\"Iteration {},", "train(self): 
training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified = 1 iteration = 0 while", "= learning_rate self.weights = np.random.normal(0, 0.1, no_of_inputs + 1) self.datapoints = datapoints self.plotResult", "* 100.0 return _acc def plot(self): total_data = len(self.datapoints) with np.printoptions(precision=7, suppress=True): print(\"Minimum", "Accuracy of Pocket: {}%\".format(((total_data - self.bestResult) / float(total_data)) * 100)) plt.plot(np.arange(0, self.iteration), self.plotResult)", "iteration: \", self.weights.transpose()) print(\"Best Weights of Pocket: \", self.bestWeights.transpose()) print(\"Best Accuracy of Pocket:", "np import matplotlib.pyplot as plt import copy class PocketAlgorithm: def __init__(self, datapoints, no_of_inputs,", "<<EMAIL>> <NAME> <<EMAIL>> ''' import numpy as np import matplotlib.pyplot as plt import", "= list() self.bestResult = float(\"inf\") self.bestWeights = np.array def predict(self, inputs): summation =", "== '__main__': data_points = np.array(getInputData('classification.txt')) no_of_inputs = 3 pck = PocketAlgorithm(data_points, no_of_inputs) pck.train()", "= 1 else: activation = -1 return activation def train(self): training_inputs, labels =", "labels = self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified = 1 iteration = 0 while iteration <", "+= 1 self.weights[1:] += self.learning_rate * error_rate * inputs self.weights[0] += self.learning_rate *", "''' import numpy as np import matplotlib.pyplot as plt import copy class PocketAlgorithm:", "self.learning_rate * error_rate self.plotResult.append(misclassified) if misclassified < self.bestResult: self.bestResult = misclassified self.bestWeights =", "delimiter=',') return data if __name__ == '__main__': data_points = np.array(getInputData('classification.txt')) no_of_inputs = 3", "Final iteration: \", self.weights.transpose()) print(\"Best Weights of Pocket: \", self.bestWeights.transpose()) print(\"Best Accuracy of", "* error_rate * 
inputs self.weights[0] += self.learning_rate * error_rate self.plotResult.append(misclassified) if misclassified <", "self.bestResult: self.bestResult = misclassified self.bestWeights = copy.deepcopy(self.weights) if iteration % 500 == 0:", "misclassified = 1 iteration = 0 while iteration < self.iteration: misclassified = 0", "misclassified < self.bestResult: self.bestResult = misclassified self.bestWeights = copy.deepcopy(self.weights) if iteration % 500", "or (label == -1 and prediction == -1): correct += 1 _acc =", "== 1 else -1 if (label == 1 and prediction == -1) or", "1 else: activation = -1 return activation def train(self): training_inputs, labels = self.datapoints[:,:-2],", "import matplotlib.pyplot as plt import copy class PocketAlgorithm: def __init__(self, datapoints, no_of_inputs, iteration=7000,", "of Pocket: {}%\".format(((total_data - self.bestResult) / float(total_data)) * 100)) plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\")", "self.datapoints = datapoints self.plotResult = list() self.bestResult = float(\"inf\") self.bestWeights = np.array def", "in zip(training_inputs, labels): prediction = self.predict(inputs) if (label == 1 and prediction ==", "print(\"Minimum Misclassified Points/Best Result: \", self.bestResult) print(\"Weight After Final iteration: \", self.weights.transpose()) print(\"Best", "self.bestWeights.transpose()) print(\"Best Accuracy of Pocket: {}%\".format(((total_data - self.bestResult) / float(total_data)) * 100)) plt.plot(np.arange(0,", "0.1, no_of_inputs + 1) self.datapoints = datapoints self.plotResult = list() self.bestResult = float(\"inf\")", "getInputData(filename): data = np.genfromtxt(filename, delimiter=',') return data if __name__ == '__main__': data_points =", "/ float(total_data)) * 100)) plt.plot(np.arange(0, self.iteration), self.plotResult) plt.xlabel(\"Iterations\") plt.ylabel(\"Misclassified Points\") plt.axis([0, self.iteration, 800,", "def train(self): training_inputs, 
labels = self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified = 1 iteration = 0", "of Pocket: \", self.bestWeights.transpose()) print(\"Best Accuracy of Pocket: {}%\".format(((total_data - self.bestResult) / float(total_data))", "1 if label == 1 else -1 if (label == 1 and prediction", "def __init__(self, datapoints, no_of_inputs, iteration=7000, learning_rate=0.0001): self.iteration = iteration self.learning_rate = learning_rate self.weights", "__name__ == '__main__': data_points = np.array(getInputData('classification.txt')) no_of_inputs = 3 pck = PocketAlgorithm(data_points, no_of_inputs)", "(label == -1 and prediction == -1): correct += 1 _acc = correct", "= datapoints self.plotResult = list() self.bestResult = float(\"inf\") self.bestWeights = np.array def predict(self,", "\", self.weights.transpose()) print(\"Best Weights of Pocket: \", self.bestWeights.transpose()) print(\"Best Accuracy of Pocket: {}%\".format(((total_data", "for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs) error_rate = 1 if", "and prediction == 1): misclassified += 1 self.weights[1:] += self.learning_rate * error_rate *", "labels = self.datapoints[:,:-2], self.datapoints[:,-1:] for inputs, label in zip(training_inputs, labels): prediction = self.predict(inputs)", "and prediction == -1): correct += 1 _acc = correct / float(len(training_inputs)) *", "self.iteration: misclassified = 0 iteration += 1 for inputs, label in zip(training_inputs, labels):", "prediction == -1): correct += 1 _acc = correct / float(len(training_inputs)) * 100.0", "== -1): correct += 1 _acc = correct / float(len(training_inputs)) * 100.0 return", "and prediction == 1) or (label == -1 and prediction == -1): correct", "= np.array def predict(self, inputs): summation = np.dot(inputs, self.weights[1:]) + self.weights[0] if summation", "training_inputs, labels = self.datapoints[:,:-2], self.datapoints[:,-1:] misclassified = 1 iteration = 0 while iteration", "<NAME> <<EMAIL>> <NAME> 
<<EMAIL>> ''' import numpy as np import matplotlib.pyplot as plt", "datapoints self.plotResult = list() self.bestResult = float(\"inf\") self.bestWeights = np.array def predict(self, inputs):", "zip(training_inputs, labels): prediction = self.predict(inputs) if (label == 1 and prediction == 1)" ]
[ "self.bot = bot @commands.Cog.listener() async def on_ready(self): print('Logged in as') print(self.bot.user.name) print(self.bot.user.id) print('------')", "__init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_ready(self): print('Logged in as') print(self.bot.user.name)", "import commands class Events(commands.Cog): def __init__(self, bot): self.bot = bot @commands.Cog.listener() async def", "def __init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_ready(self): print('Logged in as')", "bot @commands.Cog.listener() async def on_ready(self): print('Logged in as') print(self.bot.user.name) print(self.bot.user.id) print('------') def setup(bot):", "commands class Events(commands.Cog): def __init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_ready(self):", "discord.ext import commands class Events(commands.Cog): def __init__(self, bot): self.bot = bot @commands.Cog.listener() async", "bot): self.bot = bot @commands.Cog.listener() async def on_ready(self): print('Logged in as') print(self.bot.user.name) print(self.bot.user.id)", "@commands.Cog.listener() async def on_ready(self): print('Logged in as') print(self.bot.user.name) print(self.bot.user.id) print('------') def setup(bot): bot.add_cog(Events(bot))", "Events(commands.Cog): def __init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_ready(self): print('Logged in", "from discord.ext import commands class Events(commands.Cog): def __init__(self, bot): self.bot = bot @commands.Cog.listener()", "= bot @commands.Cog.listener() async def on_ready(self): print('Logged in as') print(self.bot.user.name) print(self.bot.user.id) print('------') def", "class Events(commands.Cog): def __init__(self, bot): self.bot = bot @commands.Cog.listener() async def on_ready(self): print('Logged" ]
[ "<reponame>itdagene-ntnu/itdagene from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies", "django.db import migrations, models class Migration(migrations.Migration): dependencies = [(\"company\", \"0016_auto_20160315_1950\")] operations = [", "[(\"company\", \"0016_auto_20160315_1950\")] operations = [ migrations.AlterField( model_name=\"company\", name=\"payment_email\", field=models.EmailField( max_length=75, verbose_name=\"payment email\", blank=True", "import migrations, models class Migration(migrations.Migration): dependencies = [(\"company\", \"0016_auto_20160315_1950\")] operations = [ migrations.AlterField(", "models class Migration(migrations.Migration): dependencies = [(\"company\", \"0016_auto_20160315_1950\")] operations = [ migrations.AlterField( model_name=\"company\", name=\"payment_email\",", "\"0016_auto_20160315_1950\")] operations = [ migrations.AlterField( model_name=\"company\", name=\"payment_email\", field=models.EmailField( max_length=75, verbose_name=\"payment email\", blank=True ),", "from django.db import migrations, models class Migration(migrations.Migration): dependencies = [(\"company\", \"0016_auto_20160315_1950\")] operations =", "dependencies = [(\"company\", \"0016_auto_20160315_1950\")] operations = [ migrations.AlterField( model_name=\"company\", name=\"payment_email\", field=models.EmailField( max_length=75, verbose_name=\"payment", "__future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [(\"company\",", "= [ migrations.AlterField( model_name=\"company\", name=\"payment_email\", field=models.EmailField( max_length=75, verbose_name=\"payment email\", blank=True ), preserve_default=True, )", "= [(\"company\", \"0016_auto_20160315_1950\")] operations = [ migrations.AlterField( model_name=\"company\", name=\"payment_email\", field=models.EmailField( max_length=75, 
verbose_name=\"payment email\",", "[ migrations.AlterField( model_name=\"company\", name=\"payment_email\", field=models.EmailField( max_length=75, verbose_name=\"payment email\", blank=True ), preserve_default=True, ) ]", "unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [(\"company\", \"0016_auto_20160315_1950\")] operations", "Migration(migrations.Migration): dependencies = [(\"company\", \"0016_auto_20160315_1950\")] operations = [ migrations.AlterField( model_name=\"company\", name=\"payment_email\", field=models.EmailField( max_length=75,", "import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [(\"company\", \"0016_auto_20160315_1950\")]", "from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies =", "migrations, models class Migration(migrations.Migration): dependencies = [(\"company\", \"0016_auto_20160315_1950\")] operations = [ migrations.AlterField( model_name=\"company\",", "class Migration(migrations.Migration): dependencies = [(\"company\", \"0016_auto_20160315_1950\")] operations = [ migrations.AlterField( model_name=\"company\", name=\"payment_email\", field=models.EmailField(", "operations = [ migrations.AlterField( model_name=\"company\", name=\"payment_email\", field=models.EmailField( max_length=75, verbose_name=\"payment email\", blank=True ), preserve_default=True," ]
[ "2: parser.error( \"Error: --slowbound should contain 2 \" + \"comma-separated floats\") if args.bazbound", "2.0, -1.0] else: args.weights = [float(val) for val in args.weights.split(',')] if (len(args.weights)) !=", "= UTCDateTime(year+'-'+month+'-'+day) if dateUTC > tstart and dateUTC < tend: # Load meta", "else: hkstack.stack() # Average stacks hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot, args.title, args.form) if args.save:", "args = parser.parse_args(argv) # Check inputs if not exist(args.indb): parser.error(\"Input file \" +", "exist\") # create station key list if len(args.stkeys) > 0: args.stkeys = args.stkeys.split(',')", "for pre-processing of receiver function \" + \"data prior to H-k stacking\") PreGroup.add_argument(", "of H-k search, including\" + \"bounds on search, weights, type of stacking, etc.\")", "+ \"Options are 'P', 'PP', 'allP', 'S', 'SKS' or 'allS'. \" + \"[Default", "= args.weights # Stack with or without dip if args.calc_dip: hkstack.stack_dip() else: hkstack.stack()", "| '__| |_| '_ \\| | | | | '_ \\| |/ /", "/ _|_ __ _ _ | |__ | | __ #\") print(\"# |", "Client from obspy.core import Stream, UTCDateTime from rfpy import binning, plotting, HkStack from", "of station]\") PreGroup = parser.add_argument_group( title='Pre-processing Settings', description=\"Options for pre-processing of receiver function", "contain 2 \" + \"comma-separated floats\") if args.hbound is None: args.hbound = [20.,", "|_| '_ \\| | | | | '_ \\| |/ / #\") print(\"#", "strike and dip for this type \" + \"of analysis\") else: args.calc_dip =", "args.slowbound is None: args.slowbound = [0.04, 0.08] else: args.slowbound = [float(val) for val", "# Remove outliers wrt variance within time range medvarR = np.median(varR) madvarR =", "# This file is part of RfPy. 
# # Permission is hereby granted,", "\"[Default 36]\") PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\", type=int, default=40, help=\"Specify integer number of slowness", "< args.cc: continue ''' # Check bounds on data # if meta.slow <", "filename.is_file(): file = open(filename, \"rb\") rfdata = pickle.load(file) rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts !=", "= sorted(args.bp) if (len(args.bp)) != 2: parser.error( \"Error: --bp should contain 2 \"", "\") print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s} |\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s} |\".format(", "tr in rfRstream: if (tr.stats.nbin < args.binlim): rfRstream.remove(tr) # Continue if stream is", "station database. Partial keys will \" + \"be used to match against those", "functions. \" + \"[Default None]\") PreGroup.add_argument( \"--snrh\", action=\"store\", type=float, dest=\"snrh\", default=-9999, help=\"Specify the", "+ args.startT) else: args.startT = None # construct end time if len(args.endT) >", "the valid\" + \"matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']\") args =", "in ['P', 'PP', 'allP', 'S', 'SKS', 'allS']: parser.error( \"Error: choose between 'P', 'PP',", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR", "args.bazbound = [0.0, 360.0] else: args.bazbound = [float(val) for val in args.bazbound.split(',')] args.bazbound", "the corner frequencies for the bandpass filter. 
\" + \"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\",", "weights), or 'product' for the product \" + \"of positive values in stacks.", "times [Default end date of station]\") PreGroup = parser.add_argument_group( title='Pre-processing Settings', description=\"Options for", "match with all stations in \" + \"the IU network [Default processes all", "\"bounds on Vp/Vs (k). [Default [1.56, 2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\", type=float, dest=\"dk\", default=0.02,", "== 'allS': args.listphase = ['S', 'SKS'] else: args.listphase = [args.phase] if args.typ not", "args.save: savepath = Path('HK_DATA') / stfld if not savepath.is_dir(): print('Path to '+str(savepath)+' doesn`t", "not exist\") # create station key list if len(args.stkeys) > 0: args.stkeys =", "args.strike is None and args.dip is None: args.calc_dip = False args.nbaz = None", "rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True) # Check bin counts: for", "\"[Default False]\") parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\", default=False, help=\"Force folder names to use", "name to plot. \" + \"Options are 'P', 'PP', 'allP', 'S', 'SKS' or", "must be \" + \"contained within the station database. Partial keys will \"", "print(\"# #\") print(\"#########################################\") print() # Run Input Parser args = get_hk_arguments() # Load", "rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] else: rf_tmp = binning.bin(rfRstream,", "dateUTC > tstart and dateUTC < tend: # Load meta data metafile =", "for the bandpass filter. 
\" + \"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\", type=int,", "floats\") if args.bazbound is None: args.bazbound = [0.0, 360.0] else: args.bazbound = [float(val)", "all stations in the database]\") parser.add_argument( \"-v\", \"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\", default=False, help=\"Specify", "UTCDateTime compatible string representing \" + \"the start time for the search. This", "< args.slowbound[0] and meta.slow > args.slowbound[1]: # continue # if meta.baz < args.bazbound[0]", "to permit persons to whom the Software is # furnished to do so,", "dest=\"dk\", default=0.02, help=\"Specify search interval for k. [Default 0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\", type=str,", "analysis\") else: args.calc_dip = True if args.bp is None: args.bp = [0.05, 0.5]", "start time if args.startT is None: tstart = sta.startdate else: tstart = args.startT", "depth (H, in km). [Default [20., 50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\", type=float, dest=\"dh\", default=0.5,", "dest=\"stkeys\", default=\"\", help=\"Specify a comma separated list of station keys for \" +", "else: tstart = args.startT # Get search end time if args.endT is None:", "PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set this option to delete outliers based on", "data processing on-the-fly (requires web connection) \"\"\" parser = ArgumentParser( usage=\"%(prog)s [arguments] <station", "HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\", default=False, help=\"Set this option to save the HkStack object", "the horizontal component SNR threshold for \" + \"extracting receiver functions. 
[Default None]\")", "title [Default has no title]\") PlotGroup.add_argument( \"--format\", action=\"store\", type=str, dest=\"form\", default=\"png\", help=\"Specify format", "sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check for", "path if args.save: savepath = Path('HK_DATA') / stfld if not savepath.is_dir(): print('Path to", "bins to consider. \" + \"[Default 36]\") PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\", type=int, default=40,", "Stream, UTCDateTime from rfpy import binning, plotting, HkStack from pathlib import Path from", "from obspy.clients.fdsn import Client from obspy.core import Stream, UTCDateTime from rfpy import binning,", "help=\"Specify the CC threshold for extracting receiver functions. \" + \"[Default None]\") PreGroup.add_argument(", "and this permission notice shall be included in # all copies or substantial", "= [] for i in range(len(rfRstream)): taxis = rfRstream[i].stats.taxis tselect = (taxis >", "< 5: continue if args.save_plot and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of radial", "2: parser.error( \"Error: --bazbound should contain 2 \" + \"comma-separated floats\") ## JMG", "JMG ## if args.slowbound is None: args.slowbound = [0.04, 0.08] else: args.slowbound =", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "| | | _| |_) | |_| | | | | | <", "sta.station)) print(\"| Channel: {0:2s}; Locations: {1:15s} |\".format( sta.channel, \",\".join(tlocs))) print(\"| Lon: {0:7.2f}; Lat:", "sta.location if len(tlocs) == 0: tlocs = [''] for il in range(0, len(tlocs)):", "print(\"Number of radial RF bins: \" + str(len(rfRstream))) print('') # Filter original stream", "RF data filename = folder / \"RF_Data.pkl\" if filename.is_file(): file = open(filename, \"rb\")", "== 0: tlocs = [''] for il 
in range(0, len(tlocs)): if len(tlocs[il]) ==", "dest=\"nslow\", type=int, default=40, help=\"Specify integer number of slowness bins to consider. \" +", "[20., 50.] else: args.hbound = [float(val) for val in args.hbound.split(',')] args.hbound = sorted(args.hbound)", "help=\"Specify the phase name to plot. \" + \"Options are 'P', 'PP', 'allP',", "is None: tstart = sta.startdate else: tstart = args.startT # Get search end", "None: parser.error(\"Specify both strike and dip for this type \" + \"of analysis\")", "to perform the analysis. These must be \" + \"contained within the station", "a list of three floats with for Ps, Pps and Pass \" +", "\" + args.endT) else: args.endT = None if args.strike is None and args.dip", "on Moho depth (H, in km). [Default [20., 50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\", type=float,", "for extracting receiver functions. \" + \"[Default None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False,", "\"Default behaviour uses short key form (NET.STN) for the folder \" + \"names,", "parser = ArgumentParser( usage=\"%(prog)s [arguments] <station database>\", description=\"Script used to process receiver function", "= savepath / (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update processed folders procfold.append(stfld) if", "!= 3: parser.error( \"Error: --weights should contain 3 \" + \"comma-separated floats\") return", "consider. \" + \"[Default 36]\") PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\", type=int, default=40, help=\"Specify integer", "sta.longitude, sta.latitude)) print(\"| Start time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End time: {0:19s}", "ArgumentParser from os.path import exists as exist from numpy import nan def get_hk_arguments(argv=None):", "to plot. 
\" + \"Options are 'P', 'PP', 'allP', 'S', 'SKS' or 'allS'.", "\"the times to include in searching for receiver function data\") TimeGroup.add_argument( \"--start\", action=\"store\",", "Input Parser args = get_hk_arguments() # Load Database db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys)", "except: hkstack = HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp) # Update attributes hkstack.hbound = args.hbound", "MAD \" + \"on the variance. [Default False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\", type=str,", "--bp_copy should contain 2 \" + \"comma-separated floats\") if args.hbound is None: args.hbound", "station key list if len(args.stkeys) > 0: args.stkeys = args.stkeys.split(',') # construct start", "\"--type\", action=\"store\", type=str, dest=\"typ\", default=\"sum\", help=\"Specify type of final stacking. Options are: 'sum'", "np.array(varR) # Remove outliers wrt variance within time range medvarR = np.median(varR) madvarR", "it') savepath.mkdir(parents=True) # Get search start time if args.startT is None: tstart =", "is None: tend = sta.enddate else: tend = args.endT if tstart > sta.enddate", "pickle.load(open(metafile, 'rb')) # Skip data not in list of phases if meta.phase not", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "default=-1., help=\"Specify the CC threshold for extracting receiver functions. \" + \"[Default None]\")", "1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR > 2.5] for i in outliersR[::-1]:", "filename = savepath / (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update processed folders procfold.append(stfld)", "\"Error: --weights should contain 3 \" + \"comma-separated floats\") return args def main():", "and maximum\" + \"bounds on Vp/Vs (k). 
[Default [1.56, 2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\",", "\"--title\", action=\"store\", type=str, dest=\"title\", default=\"\", help=\"Specify plot title [Default has no title]\") PlotGroup.add_argument(", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "this option to save the plot [Default doesn't save]\") PlotGroup.add_argument( \"--title\", action=\"store\", type=str,", "the product \" + \"of positive values in stacks. [Default 'sum']\") HKGroup.add_argument( \"--save\",", "(len(args.slowbound)) != 2: parser.error( \"Error: --slowbound should contain 2 \" + \"comma-separated floats\")", "objects. This function is used for data processing on-the-fly (requires web connection) \"\"\"", "produce a plot of the stacks [Default \" + \"does not produce plot]\")", "list if len(args.stkeys) > 0: args.stkeys = args.stkeys.split(',') # construct start time if", "save path if args.save: savepath = Path('HK_DATA') / stfld if not savepath.is_dir(): print('Path", "& (taxis < t2) varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR) # Remove outliers wrt variance", "args.dip is None: parser.error(\"Specify both strike and dip for this type \" +", "sorted(args.bp) if (len(args.bp)) != 2: parser.error( \"Error: --bp should contain 2 \" +", "[Default False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\", type=str, default=None, help=\"Specify a list of two", "\"a weighted average (using weights), or 'product' for the product \" + \"of", "\"--long-name\", action=\"store_true\", dest=\"lkey\", default=False, help=\"Force folder names to use long-key form (NET.STN.CHN). \"", "plotting the H-k stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\", default=False, help=\"Set this option to", "figure. 
Can be any one of the valid\" + \"matplotlib formats: 'png', 'jpg',", "if x.is_dir()] for folder in datafiles: # Skip hidden folders if folder.name.startswith('.'): continue", "args.calc_dip = False args.nbaz = None elif args.strike is None or args.dip is", "if specified if args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0]", "None # construct end time if len(args.endT) > 0: try: args.endT = UTCDateTime(args.endT)", "database.\" ) # Event Selection Criteria TimeGroup = parser.add_argument_group( title=\"Time Settings\", description=\"Settings associated", "| __ #\") print(\"# | '__| |_| '_ \\| | | | |", "args.hbound hkstack.kbound = args.kbound hkstack.dh = args.dh hkstack.dk = args.dk hkstack.weights = args.weights", "dest=\"verb\", default=False, help=\"Specify to increase verbosity.\") parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False, help=\"Force", "plot of the stacks [Default \" + \"does not produce plot]\") PlotGroup.add_argument( \"--save-plot\",", "print(\"| Channel: {0:2s}; Locations: {1:15s} |\".format( sta.channel, \",\".join(tlocs))) print(\"| Lon: {0:7.2f}; Lat: {1:6.2f}", "'PP', 'allP', 'S', 'SKS', 'allS']: parser.error( \"Error: choose between 'P', 'PP', 'allP', 'S',", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "has no title]\") PlotGroup.add_argument( \"--format\", action=\"store\", type=str, dest=\"form\", default=\"png\", help=\"Specify format of figure.", "\" + \"[Default 40]\") PreGroup.add_argument( \"--snr\", action=\"store\", type=float, dest=\"snr\", default=-9999., help=\"Specify the SNR", "for \" + \"extracting receiver functions. [Default None]\") PreGroup.add_argument( \"--cc\", action=\"store\", type=float, dest=\"cc\",", "Folder Name stfld = stkey if not args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define", "t1 = 0. t2 = 30. 
varR = [] for i in range(len(rfRstream)):", "if (len(args.hbound)) != 2: parser.error( \"Error: --hbound should contain 2 \" + \"comma-separated", "should contain 3 \" + \"comma-separated floats\") return args def main(): print() print(\"#########################################\")", "= ['P', 'PP'] elif args.phase == 'allS': args.listphase = ['S', 'SKS'] else: args.listphase", "args.copy: rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True) # Check bin counts:", "NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "pathlib import Path from argparse import ArgumentParser from os.path import exists as exist", "software and associated documentation files (the \"Software\"), to deal # in the Software", "weights, type of stacking, etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\", type=str, dest=\"hbound\", default=None, help=\"Specify a", "\\__, |___|_| |_|_|\\_\\ #\") print(\"# |_| |___/_____| #\") print(\"# #\") print(\"#########################################\") print() #", "this option to delete outliers based on the MAD \" + \"on the", "station keys for \" + \"which to perform the analysis. These must be", "help=\"Specify a UTCDateTime compatible string representing \" + \"the end time for the", "tlocs = [''] for il in range(0, len(tlocs)): if len(tlocs[il]) == 0: tlocs[il]", "(using weights), or 'product' for the product \" + \"of positive values in", "Pass \" + \"weights in final stack. [Default [0.5, 2., -1.]]\") HKGroup.add_argument( \"--type\",", "(NET.STN.CHN). \" + \"Default behaviour uses short key form (NET.STN) for the folder", "360.0] else: args.bazbound = [float(val) for val in args.bazbound.split(',')] args.bazbound = sorted(args.bazbound) if", "and to permit persons to whom the Software is # furnished to do", "\"station start times. 
[Default start date of station]\") TimeGroup.add_argument( \"--end\", action=\"store\", type=str, dest=\"endT\",", "radial component \" + \"filtered at different corners for the Pps and Pss", "of final stacking. Options are: 'sum' for \" + \"a weighted average (using", "in args.bp.split(',')] args.bp = sorted(args.bp) if (len(args.bp)) != 2: parser.error( \"Error: --bp should", "Pps and Pss phases. \" + \"[Default False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str,", "construct start time if len(args.startT) > 0: try: args.startT = UTCDateTime(args.startT) except: parser.error(", "# furnished to do so, subject to the following conditions: # # The", "the Software, and to permit persons to whom the Software is # furnished", "PlotGroup.add_argument( \"--format\", action=\"store\", type=str, dest=\"form\", default=\"png\", help=\"Specify format of figure. Can be any", "2., -1.]]\") HKGroup.add_argument( \"--type\", action=\"store\", type=str, dest=\"typ\", default=\"sum\", help=\"Specify type of final stacking.", "list of phases if meta.phase not in args.listphase: continue # QC Thresholding if", "vp=args.vp) except: hkstack = HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp) # Update attributes hkstack.hbound =", "functions. 
[Default None]\") PreGroup.add_argument( \"--cc\", action=\"store\", type=float, dest=\"cc\", default=-1., help=\"Specify the CC threshold", "print(\"| {0:>8s} |\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s} |\".format( sta.network, sta.station)) print(\"|", "hkstack.stack() # Average stacks hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot, args.title, args.form) if args.save: filename", "import ArgumentParser from os.path import exists as exist from numpy import nan def", "object try: hkstack = HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike, dip=args.dip, vp=args.vp) except: hkstack = HkStack(rfRstream,", "'S', 'SKS', 'allS']: parser.error( \"Error: choose between 'P', 'PP', 'allP', 'S', 'SKS' and", "print(\" \") print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s} |\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s}", "string representing \" + \"the end time for the search. This will override", "['P', 'PP', 'allP', 'S', 'SKS', 'allS']: parser.error( \"Error: choose between 'P', 'PP', 'allP',", "sorted(args.kbound) if (len(args.kbound)) != 2: parser.error( \"Error: --kbound should contain 2 \" +", "= folder / \"Meta_Data.pkl\" if not metafile.is_file(): continue meta = pickle.load(open(metafile, 'rb')) #", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "type=str, default=None, help=\"Specify a list of two floats with minimum and maximum\" +", "of slowness bins to consider. 
\" + \"[Default 40]\") PreGroup.add_argument( \"--snr\", action=\"store\", type=float,", "db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track processed folders procfold = [] #", "['sum', 'product']: parser.error( \"Error: choose between 'sum' and 'product'\") if args.copy: if args.bp_copy", "action=\"store_true\", dest=\"lkey\", default=False, help=\"Force folder names to use long-key form (NET.STN.CHN). \" +", "floats with minimum and maximum\" + \"bounds on slowness (s/km). [Default [0.04, 0.08]]\")", "if (len(args.bazbound)) != 2: parser.error( \"Error: --bazbound should contain 2 \" + \"comma-separated", "time for the search. This will override any \" + \"station start times.", "stkey in list(stkeys): # Extract station information from dictionary sta = db[stkey] #", "= rf_tmp[0] else: rf_tmp = binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] #", "default=None, help=\"Specify a list of two floats with minimum and maximum\" + \"frequency", "date[4:6] day = date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day) if dateUTC > tstart and dateUTC", "default=False, help=\"Set this option to use phase-weighted stacking during binning \" + \"", "and Pass \" + \"weights in final stack. [Default [0.5, 2., -1.]]\") HKGroup.add_argument(", "phases. \" + \"[Default False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str, default=None, help=\"Specify a", "corner frequencies for the bandpass filter. \" + \"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\",", "type=str, dest=\"hbound\", default=None, help=\"Specify a list of two floats with minimum and maximum\"", "of dipping Moho. 
[Default None]\") ModelGroup.add_argument( \"--dip\", action=\"store\", type=float, dest=\"dip\", default=None, help=\"Specify the", "_ _ | |__ | | __ #\") print(\"# | '__| |_| '_", "is too short if len(rfRstream) < 5: continue if args.save_plot and not Path('HK_PLOTS').is_dir():", "\"instance, providing IU will match with all stations in \" + \"the IU", "6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\", type=float, dest=\"strike\", default=None, help=\"Specify the strike of dipping Moho.", "action=\"store\", type=str, dest=\"form\", default=\"png\", help=\"Specify format of figure. Can be any one of", "{0:>2s}.{1:5s} |\".format( sta.network, sta.station)) print(\"| Channel: {0:2s}; Locations: {1:15s} |\".format( sta.channel, \",\".join(tlocs))) print(\"|", "maximum\" + \"frequency for the copies stream (Hz). [Default [0.05, 0.35]]\") HKGroup =", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "station]\") PreGroup = parser.add_argument_group( title='Pre-processing Settings', description=\"Options for pre-processing of receiver function \"", "extracting receiver functions. \" + \"[Default None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set", "\"--start\", action=\"store\", type=str, dest=\"startT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \" +", "#\") print(\"# _ __ / _|_ __ _ _ | |__ | |", "= UTCDateTime(args.endT) except: parser.error( \"Cannot construct UTCDateTime from end time: \" + args.endT)", "strike=args.strike, dip=args.dip, vp=args.vp) # Update attributes hkstack.hbound = args.hbound hkstack.kbound = args.kbound hkstack.dh", "> 0: try: args.endT = UTCDateTime(args.endT) except: parser.error( \"Cannot construct UTCDateTime from end", "'allS'.\") if args.phase == 'allP': args.listphase = ['P', 'PP'] elif args.phase == 'allS':", "PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "contain 2 \" + \"comma-separated floats\") ## JMG ## if args.slowbound is None:", "should contain 2 \" + \"comma-separated floats\") ## JMG ## if args.slowbound is", "os.path import exists as exist from numpy import nan def get_hk_arguments(argv=None): \"\"\" Get", "to consider. \" + \"[Default 40]\") PreGroup.add_argument( \"--snr\", action=\"store\", type=float, dest=\"snr\", default=-9999., help=\"Specify", "# # This file is part of RfPy. # # Permission is hereby", "These must be \" + \"contained within the station database. Partial keys will", "/ stfld if not savepath.is_dir(): print('Path to '+str(savepath)+' doesn`t exist - creating it')", "type \" + \"of analysis\") else: args.calc_dip = True if args.bp is None:", "nbin=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] # Get a copy of the radial component", "\" + \"on the variance. [Default False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\", type=str, default=None,", "Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "\"--snrh\", action=\"store\", type=float, dest=\"snrh\", default=-9999, help=\"Specify the horizontal component SNR threshold for \"", "Loop over station keys for stkey in list(stkeys): # Extract station information from", "for the product \" + \"of positive values in stacks. [Default 'sum']\") HKGroup.add_argument(", "meta.phase not in args.listphase: continue # QC Thresholding if meta.snrh < args.snrh: continue", "if meta.snr < args.snr: continue if meta.cc < args.cc: continue ''' # Check", "interval for H (km). [Default 0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\", type=str, dest=\"kbound\", default=None, help=\"Specify", "will override any \" + \"station end times [Default end date of station]\")", "dictionary sta = db[stkey] # Construct Folder Name stfld = stkey if not", "\" + \"the start time for the search. 
This will override any \"", "to deal # in the Software without restriction, including without limitation the rights", "based on the MAD \" + \"on the variance. [Default False]\") PreGroup.add_argument( \"--slowbound\",", "type=float, dest=\"dk\", default=0.02, help=\"Specify search interval for k. [Default 0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\",", "to any person obtaining a copy # of this software and associated documentation", "Settings ModelGroup = parser.add_argument_group( title='Model Settings', description=\"Miscellaneous default values and settings\") ModelGroup.add_argument( \"--vp\",", "varR = np.array(varR) # Remove outliers wrt variance within time range medvarR =", "with or without dip if args.calc_dip: hkstack.stack_dip() else: hkstack.stack() # Average stacks hkstack.average(typ=args.typ)", "HKGroup.add_argument( \"--hbound\", action=\"store\", type=str, dest=\"hbound\", default=None, help=\"Specify a list of two floats with", "'allP', 'S', 'SKS' or 'allS'. \" + \"[Default 'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\",", "1451: print(folder) if len(rfRstream) == 0: continue if args.no_outl: t1 = 0. t2", "action=\"store\", type=str, dest=\"weights\", default=None, help=\"Specify a list of three floats with for Ps,", "+ \"weights in final stack. 
[Default [0.5, 2., -1.]]\") HKGroup.add_argument( \"--type\", action=\"store\", type=str,", "action=\"store\", dest=\"slowbound\", type=str, default=None, help=\"Specify a list of two floats with minimum and", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "dest=\"save_plot\", default=False, help=\"Set this option to save the plot [Default doesn't save]\") PlotGroup.add_argument(", "None or args.dip is None: parser.error(\"Specify both strike and dip for this type", "args.kbound = [1.56, 2.1] else: args.kbound = [float(val) for val in args.kbound.split(',')] args.kbound", "datapath.iterdir() if x.is_dir()] for folder in datafiles: # Skip hidden folders if folder.name.startswith('.'):", "[0.05, 0.35]]\") HKGroup = parser.add_argument_group( title='Settings for H-k Stacking', description=\"Specify parameters of H-k", "0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\", type=str, dest=\"kbound\", default=None, help=\"Specify a list of two floats", "processing on-the-fly (requires web connection) \"\"\" parser = ArgumentParser( usage=\"%(prog)s [arguments] <station database>\",", "(hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update processed folders procfold.append(stfld) if __name__ == \"__main__\":", "back-azimuth bins to consider. \" + \"[Default 36]\") PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\", type=int,", "H-k search, including\" + \"bounds on search, weights, type of stacking, etc.\") HKGroup.add_argument(", "parser.error(\"Specify both strike and dip for this type \" + \"of analysis\") else:", "meta.cc < args.cc: continue ''' # Check bounds on data # if meta.slow", "delete outliers based on the MAD \" + \"on the variance. 
[Default False]\")", "(len(args.bp)) != 2: parser.error( \"Error: --bp should contain 2 \" + \"comma-separated floats\")", "'allP', 'S', 'SKS' and 'allS'.\") if args.phase == 'allP': args.listphase = ['P', 'PP']", "\"-v\", \"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\", default=False, help=\"Specify to increase verbosity.\") parser.add_argument( \"-O\", \"--overwrite\",", "dest=\"copy\", default=False, help=\"Set this option to use a copy of the radial component", "for val in args.hbound.split(',')] args.hbound = sorted(args.hbound) if (len(args.hbound)) != 2: parser.error( \"Error:", "stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track processed folders procfold = [] # Loop", "\" + \"for H-k stacking.\") # General Settings parser.add_argument( \"indb\", help=\"Station Database to", "strike of dipping Moho. [Default None]\") ModelGroup.add_argument( \"--dip\", action=\"store\", type=float, dest=\"dip\", default=None, help=\"Specify", "parser.error( \"Error: --kbound should contain 2 \" + \"comma-separated floats\") if args.weights is", "+ \"station end times [Default end date of station]\") PreGroup = parser.add_argument_group( title='Pre-processing", "exists if args.phase in ['P', 'PP', 'allP']: datapath = Path('P_DATA') / stfld elif", "end time for the search. This will override any \" + \"station end", "short key form (NET.STN) for the folder \" + \"names, regardless of the", "default=\"png\", help=\"Specify format of figure. Can be any one of the valid\" +", "this software and associated documentation files (the \"Software\"), to deal # in the", "str(datapath) + ' doesn`t exist - continuing') continue # Define save path if", "to consider. \" + \"[Default 36]\") PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\", type=int, default=40, help=\"Specify", "at different corners for the Pps and Pss phases. 
\" + \"[Default False]\")", "args.kbound is None: args.kbound = [1.56, 2.1] else: args.kbound = [float(val) for val", "PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\", default=False, help=\"Set this option to produce a plot of", "title='Settings for H-k Stacking', description=\"Specify parameters of H-k search, including\" + \"bounds on", "+ \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update processed folders procfold.append(stfld) if __name__ == \"__main__\": #", "the radial component \" + \"filtered at different corners for the Pps and", "HkStack object try: hkstack = HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike, dip=args.dip, vp=args.vp) except: hkstack =", "if args.kbound is None: args.kbound = [1.56, 2.1] else: args.kbound = [float(val) for", "Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of radial RF bins: \" + str(len(rfRstream))) print('') # Filter", "if args.startT is None: tstart = sta.startdate else: tstart = args.startT # Get", "granted, free of charge, to any person obtaining a copy # of this", "else: args.bp_copy = [float(val) for val in args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy) if (len(args.bp_copy))", "[Default 3]\") PreGroup.add_argument( \"--bp\", action=\"store\", type=str, dest=\"bp\", default=None, help=\"Specify the corner frequencies for", "= np.array(varR) # Remove outliers wrt variance within time range medvarR = np.median(varR)", "\"--format\", action=\"store\", type=str, dest=\"form\", default=\"png\", help=\"Specify format of figure. 
Can be any one", "pws=args.pws) rfRstream = rf_tmp[0] else: rf_tmp = binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream =", "2 \" + \"comma-separated floats\") if args.bazbound is None: args.bazbound = [0.0, 360.0]", "|___/_____| #\") print(\"# #\") print(\"#########################################\") print() # Run Input Parser args = get_hk_arguments()", "Settings\", description=\"Settings associated with refining \" + \"the times to include in searching", "help=\"Set this option to delete outliers based on the MAD \" + \"on", "HKGroup.add_argument( \"--weights\", action=\"store\", type=str, dest=\"weights\", default=None, help=\"Specify a list of three floats with", "= args.startT # Get search end time if args.endT is None: tend =", "should contain 2 \" + \"comma-separated floats\") if args.weights is None: args.weights =", "default=False, help=\"Specify to increase verbosity.\") parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False, help=\"Force the", "dest=\"title\", default=\"\", help=\"Specify plot title [Default has no title]\") PlotGroup.add_argument( \"--format\", action=\"store\", type=str,", "# Permission is hereby granted, free of charge, to any person obtaining a", "\"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set this option to delete outliers based on the", "\"comma-separated floats\") return args def main(): print() print(\"#########################################\") print(\"# __ _ _ #\")", "| | | | | < #\") print(\"# |_| |_| | .__/ \\__,", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "print(\"#########################################\") print(\"# __ _ _ #\") print(\"# _ __ / _|_ __ _", "HkStack from pathlib import Path from argparse import ArgumentParser from os.path import exists", "stacking during binning \" + \" [Default False]\") PreGroup.add_argument( \"--phase\", action=\"store\", type=str, dest=\"phase\",", 
"None: args.hbound = [20., 50.] else: args.hbound = [float(val) for val in args.hbound.split(',')]", "dest=\"startT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \" + \"the start time", "args.indb + \" does not exist\") # create station key list if len(args.stkeys)", "of the database.\" ) # Event Selection Criteria TimeGroup = parser.add_argument_group( title=\"Time Settings\",", "[Default None]\") PreGroup.add_argument( \"--cc\", action=\"store\", type=float, dest=\"cc\", default=-1., help=\"Specify the CC threshold for", "taxis = rfRstream[i].stats.taxis tselect = (taxis > t1) & (taxis < t2) varR.append(np.var(rfRstream[i].data[tselect]))", "[Default [0, 360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\", default=False, help=\"Set this option to use", "end times [Default end date of station]\") PreGroup = parser.add_argument_group( title='Pre-processing Settings', description=\"Options", "[Default end date of station]\") PreGroup = parser.add_argument_group( title='Pre-processing Settings', description=\"Options for pre-processing", "\"--binlim\", action=\"store\", type=float, dest=\"binlim\", default=1, help=\"Specify the minimum number of RFs in each", "phase-weighted stacking during binning \" + \" [Default False]\") PreGroup.add_argument( \"--phase\", action=\"store\", type=str,", "not exist(args.indb): parser.error(\"Input file \" + args.indb + \" does not exist\") #", "+ \"[Default 'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\", default=False, help=\"Set this option to use", "HKGroup.add_argument( \"--dk\", action=\"store\", type=float, dest=\"dk\", default=0.02, help=\"Specify search interval for k. 
[Default 0.02]\")", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "#\") print(\"#########################################\") print() # Run Input Parser args = get_hk_arguments() # Load Database", "sta.startdate else: tstart = args.startT # Get search end time if args.endT is", "args.hbound = [float(val) for val in args.hbound.split(',')] args.hbound = sorted(args.hbound) if (len(args.hbound)) !=", "without restriction, including without limitation the rights # to use, copy, modify, merge,", "[0.5, 2.0, -1.0] else: args.weights = [float(val) for val in args.weights.split(',')] if (len(args.weights))", "in the database]\") parser.add_argument( \"-v\", \"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\", default=False, help=\"Specify to increase", "type=str, dest=\"title\", default=\"\", help=\"Specify plot title [Default has no title]\") PlotGroup.add_argument( \"--format\", action=\"store\",", "\" + \"[Default 'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\", default=False, help=\"Set this option to", "within the station database. Partial keys will \" + \"be used to match", "'.format(stfld)) continue rfRstream = Stream() datafiles = [x for x in datapath.iterdir() if", "3]\") PreGroup.add_argument( \"--bp\", action=\"store\", type=str, dest=\"bp\", default=None, help=\"Specify the corner frequencies for the", "+ \"station start times. 
[Default start date of station]\") TimeGroup.add_argument( \"--end\", action=\"store\", type=str,", "plot title [Default has no title]\") PlotGroup.add_argument( \"--format\", action=\"store\", type=str, dest=\"form\", default=\"png\", help=\"Specify", "# if meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]: # continue # if", "%H:%M:%S\"))) print(\"| End time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check for folder", "+ \"[Default 36]\") PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\", type=int, default=40, help=\"Specify integer number of", "< args.bazbound[0] and meta.baz > args.bazbound[1]: # continue ''' # If everything passed,", "args.startT = UTCDateTime(args.startT) except: parser.error( \"Cannot construct UTCDateTime from start time: \" +", "copies of the Software, and to permit persons to whom the Software is", "receiver functions. [Default None]\") PreGroup.add_argument( \"--cc\", action=\"store\", type=float, dest=\"cc\", default=-1., help=\"Specify the CC", "args.weights = [float(val) for val in args.weights.split(',')] if (len(args.weights)) != 3: parser.error( \"Error:", "| '_ \\| |/ / #\") print(\"# | | | _| |_) |", "= [float(val) for val in args.hbound.split(',')] args.hbound = sorted(args.hbound) if (len(args.hbound)) != 2:", "station]\") TimeGroup.add_argument( \"--end\", action=\"store\", type=str, dest=\"endT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing", "For \" + \"instance, providing IU will match with all stations in \"", "+ \"comma-separated floats\") ## JMG ## if args.slowbound is None: args.slowbound = [0.04,", "not args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path to see if it exists", "action=\"store\", type=str, dest=\"kbound\", default=None, help=\"Specify a list of two floats with minimum and", "= args.dk hkstack.weights = args.weights # Stack with or 
without dip if args.calc_dip:", "in final stack. [Default [0.5, 2., -1.]]\") HKGroup.add_argument( \"--type\", action=\"store\", type=str, dest=\"typ\", default=\"sum\",", "print(\"# | '__| |_| '_ \\| | | | | '_ \\| |/", "of the valid\" + \"matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']\") args", "obtaining a copy # of this software and associated documentation files (the \"Software\"),", "i in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number of radial RF data: \" + str(len(rfRstream)))", "= sorted(args.slowbound) if (len(args.slowbound)) != 2: parser.error( \"Error: --slowbound should contain 2 \"", "override any \" + \"station end times [Default end date of station]\") PreGroup", "if meta.phase not in args.listphase: continue # QC Thresholding if meta.snrh < args.snrh:", "from numpy import nan def get_hk_arguments(argv=None): \"\"\" Get Options from :class:`~optparse.OptionParser` objects. This", "if not exist(args.indb): parser.error(\"Input file \" + args.indb + \" does not exist\")", "is None: args.hbound = [20., 50.] else: args.hbound = [float(val) for val in", "not savepath.is_dir(): print('Path to '+str(savepath)+' doesn`t exist - creating it') savepath.mkdir(parents=True) # Get", "load the RF data filename = folder / \"RF_Data.pkl\" if filename.is_file(): file =", "the RF data filename = folder / \"RF_Data.pkl\" if filename.is_file(): file = open(filename,", "'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\", default=False, help=\"Set this option to use a copy", "+ str(len(rfRstream))) print('') # Try binning if specified if args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream,", "continue # QC Thresholding if meta.snrh < args.snrh: continue if meta.snr < args.snr:", "\"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False, help=\"Force the overwriting of pre-existing data. 
\" + \"[Default", "{0:2s}; Locations: {1:15s} |\".format( sta.channel, \",\".join(tlocs))) print(\"| Lon: {0:7.2f}; Lat: {1:6.2f} |\".format( sta.longitude,", "+ \"filtered at different corners for the Pps and Pss phases. \" +", "default=False, help=\"Set this option to use a copy of the radial component \"", "and maximum\" + \"bounds on Moho depth (H, in km). [Default [20., 50.]]\")", "| < #\") print(\"# |_| |_| | .__/ \\__, |___|_| |_|_|\\_\\ #\") print(\"#", "time for the search. This will override any \" + \"station end times", "for receiver function data\") TimeGroup.add_argument( \"--start\", action=\"store\", type=str, dest=\"startT\", default=\"\", help=\"Specify a UTCDateTime", "database>\", description=\"Script used to process receiver function data \" + \"for H-k stacking.\")", "Remove outliers wrt variance within time range medvarR = np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR))", "+ \"the times to include in searching for receiver function data\") TimeGroup.add_argument( \"--start\",", "procfold = [] # Loop over station keys for stkey in list(stkeys): #", "args.dk hkstack.weights = args.weights # Stack with or without dip if args.calc_dip: hkstack.stack_dip()", "separated list of station keys for \" + \"which to perform the analysis.", "args.bp.split(',')] args.bp = sorted(args.bp) if (len(args.bp)) != 2: parser.error( \"Error: --bp should contain", "any person obtaining a copy # of this software and associated documentation files", "contain 3 \" + \"comma-separated floats\") return args def main(): print() print(\"#########################################\") print(\"#", "print(\"Number of radial RF data: \" + str(len(rfRstream))) print('') # Try binning if", "Average stacks hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot, args.title, args.form) if args.save: filename = savepath", "Check bounds on data # if meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]:", "parser.add_argument_group( 
title='Model Settings', description=\"Miscellaneous default values and settings\") ModelGroup.add_argument( \"--vp\", action=\"store\", type=float, dest=\"vp\",", "python # Copyright 2019 <NAME> # # This file is part of RfPy.", "to produce a plot of the stacks [Default \" + \"does not produce", "of two floats with minimum and maximum\" + \"frequency for the copies stream", ") # Event Selection Criteria TimeGroup = parser.add_argument_group( title=\"Time Settings\", description=\"Settings associated with", "try: args.startT = UTCDateTime(args.startT) except: parser.error( \"Cannot construct UTCDateTime from start time: \"", "None: args.bazbound = [0.0, 360.0] else: args.bazbound = [float(val) for val in args.bazbound.split(',')]", "# Loop over station keys for stkey in list(stkeys): # Extract station information", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "a copy # of this software and associated documentation files (the \"Software\"), to", "regardless of the key type of the database.\" ) # Event Selection Criteria", "metafile = folder / \"Meta_Data.pkl\" if not metafile.is_file(): continue meta = pickle.load(open(metafile, 'rb'))", "< args.snrh: continue if meta.snr < args.snr: continue if meta.cc < args.cc: continue", "#!/usr/bin/env python # Copyright 2019 <NAME> # # This file is part of", "\"which to perform the analysis. 
These must be \" + \"contained within the", "+ \"does not produce plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set this option", "data metafile = folder / \"Meta_Data.pkl\" if not metafile.is_file(): continue meta = pickle.load(open(metafile,", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "(the \"Software\"), to deal # in the Software without restriction, including without limitation", "i in range(len(rfRstream)): taxis = rfRstream[i].stats.taxis tselect = (taxis > t1) & (taxis", "vp=args.vp) # Update attributes hkstack.hbound = args.hbound hkstack.kbound = args.kbound hkstack.dh = args.dh", "[float(val) for val in args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy) if (len(args.bp_copy)) != 2: parser.error(", "data: \" + str(len(rfRstream))) print('') # Try binning if specified if args.calc_dip: rf_tmp", "this option to use phase-weighted stacking during binning \" + \" [Default False]\")", "args.bp_copy is None: args.bp_copy = [0.05, 0.35] else: args.bp_copy = [float(val) for val", "with for Ps, Pps and Pass \" + \"weights in final stack. [Default", "charge, to any person obtaining a copy # of this software and associated", "short if len(rfRstream) < 5: continue if args.save_plot and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('')", "for \" + \"which to perform the analysis. These must be \" +", "connection) \"\"\" parser = ArgumentParser( usage=\"%(prog)s [arguments] <station database>\", description=\"Script used to process", "(tr.stats.nbin < args.binlim): rfRstream.remove(tr) # Continue if stream is too short if len(rfRstream)", "UTCDateTime(args.endT) except: parser.error( \"Cannot construct UTCDateTime from end time: \" + args.endT) else:", "the strike of dipping Moho. 
[Default None]\") ModelGroup.add_argument( \"--dip\", action=\"store\", type=float, dest=\"dip\", default=None,", "(len(args.weights)) != 3: parser.error( \"Error: --weights should contain 3 \" + \"comma-separated floats\")", "# Continue if stream is too short if len(rfRstream) < 5: continue if", "if args.copy: rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True) # Check bin", "= np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR > 2.5]", "a copy of the radial component \" + \"filtered at different corners for", "dest=\"hbound\", default=None, help=\"Specify a list of two floats with minimum and maximum\" +", "does not exist\") # create station key list if len(args.stkeys) > 0: args.stkeys", "in args.weights.split(',')] if (len(args.weights)) != 3: parser.error( \"Error: --weights should contain 3 \"", "limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or", "0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\", type=int, default=36, help=\"Specify integer number of back-azimuth bins", "ArgumentParser( usage=\"%(prog)s [arguments] <station database>\", description=\"Script used to process receiver function data \"", "search, weights, type of stacking, etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\", type=str, dest=\"hbound\", default=None, help=\"Specify", "{0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check for folder already processed if stfld", "print(folder) if len(rfRstream) == 0: continue if args.no_outl: t1 = 0. 
t2 =", "'PP', 'allP']: datapath = Path('P_DATA') / stfld elif args.phase in ['S', 'SKS', 'allS']:", "print('Path to ' + str(datapath) + ' doesn`t exist - continuing') continue #", "action=\"store\", dest=\"nbaz\", type=int, default=36, help=\"Specify integer number of back-azimuth bins to consider. \"", "times to include in searching for receiver function data\") TimeGroup.add_argument( \"--start\", action=\"store\", type=str,", "for the copies stream (Hz). [Default [0.05, 0.35]]\") HKGroup = parser.add_argument_group( title='Settings for", "General Settings parser.add_argument( \"indb\", help=\"Station Database to process from.\", type=str) parser.add_argument( \"--keys\", action=\"store\",", "bin. [Default 3]\") PreGroup.add_argument( \"--bp\", action=\"store\", type=str, dest=\"bp\", default=None, help=\"Specify the corner frequencies", "for tr in rfRstream: if (tr.stats.nbin < args.binlim): rfRstream.remove(tr) # Continue if stream", "True if args.bp is None: args.bp = [0.05, 0.5] else: args.bp = [float(val)", "action=\"store\", type=float, dest=\"dip\", default=None, help=\"Specify the dip of dipping Moho. [Default None]\") PlotGroup", "if len(args.startT) > 0: try: args.startT = UTCDateTime(args.startT) except: parser.error( \"Cannot construct UTCDateTime", "> 0: try: args.startT = UTCDateTime(args.startT) except: parser.error( \"Cannot construct UTCDateTime from start", "receiver functions. \" + \"[Default None]\") PreGroup.add_argument( \"--snrh\", action=\"store\", type=float, dest=\"snrh\", default=-9999, help=\"Specify", "parameters of H-k search, including\" + \"bounds on search, weights, type of stacking,", "time: \" + args.endT) else: args.endT = None if args.strike is None and", "be \" + \"contained within the station database. 
Partial keys will \" +", "\" + args.startT) else: args.startT = None # construct end time if len(args.endT)", "args = get_hk_arguments() # Load Database db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track", "rfRstream = rf_tmp[0] else: rf_tmp = binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0]", "[Default processes all stations in the database]\") parser.add_argument( \"-v\", \"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\",", "default=None, help=\"Specify the dip of dipping Moho. [Default None]\") PlotGroup = parser.add_argument_group( title='Settings", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "type=str, dest=\"form\", default=\"png\", help=\"Specify format of figure. Can be any one of the", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "Track processed folders procfold = [] # Loop over station keys for stkey", "in args.kbound.split(',')] args.kbound = sorted(args.kbound) if (len(args.kbound)) != 2: parser.error( \"Error: --kbound should", "should contain 2 \" + \"comma-separated floats\") if args.hbound is None: args.hbound =", "args.snr: continue if meta.cc < args.cc: continue ''' # Check bounds on data", "= args.kbound hkstack.dh = args.dh hkstack.dk = args.dk hkstack.weights = args.weights # Stack", "doesn`t exist - creating it') savepath.mkdir(parents=True) # Get search start time if args.startT", "varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR) # Remove outliers wrt variance within time range medvarR", "'product'\") if args.copy: if args.bp_copy is None: args.bp_copy = [0.05, 0.35] else: args.bp_copy", "searching for receiver function data\") TimeGroup.add_argument( \"--start\", action=\"store\", type=str, dest=\"startT\", default=\"\", help=\"Specify a", "'S', 'SKS' and 'allS'.\") if args.phase == 'allP': args.listphase = ['P', 'PP'] elif", "Display print(\" \") print(\" \") 
print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s} |\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\")", "of the key type of the database.\" ) # Event Selection Criteria TimeGroup", "Settings', description=\"Miscellaneous default values and settings\") ModelGroup.add_argument( \"--vp\", action=\"store\", type=float, dest=\"vp\", default=6.0, help=\"Specify", "to file. \" + \"[Default doesn't save]\") # Constants Settings ModelGroup = parser.add_argument_group(", "date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day) if dateUTC > tstart and dateUTC < tend: #", "2 \" + \"comma-separated floats\") ## JMG ## if args.slowbound is None: args.slowbound", "+ \"bounds on search, weights, type of stacking, etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\", type=str,", "during binning \" + \" [Default False]\") PreGroup.add_argument( \"--phase\", action=\"store\", type=str, dest=\"phase\", default='allP',", "[] # Loop over station keys for stkey in list(stkeys): # Extract station", "## if args.slowbound is None: args.slowbound = [0.04, 0.08] else: args.slowbound = [float(val)", "0: try: args.endT = UTCDateTime(args.endT) except: parser.error( \"Cannot construct UTCDateTime from end time:", "[float(val) for val in args.bazbound.split(',')] args.bazbound = sorted(args.bazbound) if (len(args.bazbound)) != 2: parser.error(", "if len(rfRstream) < 5: continue if args.save_plot and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number", "None and args.dip is None: args.calc_dip = False args.nbaz = None elif args.strike", "continue if args.no_outl: t1 = 0. t2 = 30. varR = [] for", "default=False, help=\"Set this option to save the HkStack object to file. 
\" +", "sta.enddate or tend < sta.startdate: continue # Temporary print locations tlocs = sta.location", "IN THE # SOFTWARE. # Import modules and functions import numpy as np", "3 \" + \"comma-separated floats\") return args def main(): print() print(\"#########################################\") print(\"# __", "# # Permission is hereby granted, free of charge, to any person obtaining", "0: tlocs = [''] for il in range(0, len(tlocs)): if len(tlocs[il]) == 0:", "the SNR threshold for extracting receiver functions. \" + \"[Default None]\") PreGroup.add_argument( \"--snrh\",", "args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] else: rf_tmp =", "# If everything passed, load the RF data filename = folder / \"RF_Data.pkl\"", "(Hz). [Default [0.05, 0.35]]\") HKGroup = parser.add_argument_group( title='Settings for H-k Stacking', description=\"Specify parameters", "parser.error( \"Error: --hbound should contain 2 \" + \"comma-separated floats\") if args.kbound is", "help=\"Specify a UTCDateTime compatible string representing \" + \"the start time for the", "= None if args.strike is None and args.dip is None: args.calc_dip = False", "HKGroup = parser.add_argument_group( title='Settings for H-k Stacking', description=\"Specify parameters of H-k search, including\"", "if (len(args.kbound)) != 2: parser.error( \"Error: --kbound should contain 2 \" + \"comma-separated", "< tend: # Load meta data metafile = folder / \"Meta_Data.pkl\" if not", "np import pickle import stdb from obspy.clients.fdsn import Client from obspy.core import Stream,", "in args.hbound.split(',')] args.hbound = sorted(args.hbound) if (len(args.hbound)) != 2: parser.error( \"Error: --hbound should", "# Check bin counts: for tr in rfRstream: if (tr.stats.nbin < args.binlim): rfRstream.remove(tr)", "5: continue if args.save_plot and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of 
radial RF", "help=\"Set this option to produce a plot of the stacks [Default \" +", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "= \"--\" sta.location = tlocs # Update Display print(\" \") print(\" \") print(\"|===============================================|\")", "\"[Default 'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\", default=False, help=\"Set this option to use a", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "dest=\"save\", default=False, help=\"Set this option to save the HkStack object to file. \"", "type=str, dest=\"endT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \" + \"the end", "keys=args.stkeys) # Track processed folders procfold = [] # Loop over station keys", "'PP', 'allP', 'S', 'SKS' and 'allS'.\") if args.phase == 'allP': args.listphase = ['P',", "args.phase == 'allS': args.listphase = ['S', 'SKS'] else: args.listphase = [args.phase] if args.typ", "args.listphase = [args.phase] if args.typ not in ['sum', 'product']: parser.error( \"Error: choose between", "freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True) # Initialize the HkStack object try: hkstack = HkStack(rfRstream,", "produce plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set this option to save the", "the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "import nan def get_hk_arguments(argv=None): \"\"\" Get Options from :class:`~optparse.OptionParser` objects. 
This function is", "Channel: {0:2s}; Locations: {1:15s} |\".format( sta.channel, \",\".join(tlocs))) print(\"| Lon: {0:7.2f}; Lat: {1:6.2f} |\".format(", "of the Software, and to permit persons to whom the Software is #", "in rfRstream: if (tr.stats.nbin < args.binlim): rfRstream.remove(tr) # Continue if stream is too", "+ \"[Default None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set this option to delete", "a UTCDateTime compatible string representing \" + \"the start time for the search.", "__ #\") print(\"# | '__| |_| '_ \\| | | | | '_", "time if args.startT is None: tstart = sta.startdate else: tstart = args.startT #", "type=float, dest=\"strike\", default=None, help=\"Specify the strike of dipping Moho. [Default None]\") ModelGroup.add_argument( \"--dip\",", "contain 2 \" + \"comma-separated floats\") if args.weights is None: args.weights = [0.5,", "datapath = Path('S_DATA') / stfld if not datapath.is_dir(): print('Path to ' + str(datapath)", "print('') print(\"Number of radial RF data: \" + str(len(rfRstream))) print('') # Try binning", "floats\") if args.kbound is None: args.kbound = [1.56, 2.1] else: args.kbound = [float(val)", "action=\"store\", type=float, dest=\"cc\", default=-1., help=\"Specify the CC threshold for extracting receiver functions. 
\"", "help=\"Set this option to save the plot [Default doesn't save]\") PlotGroup.add_argument( \"--title\", action=\"store\",", "%H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check for folder already processed if stfld in procfold: print('", "outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number of radial RF data: \" + str(len(rfRstream))) print('') #", "\" + \"station end times [Default end date of station]\") PreGroup = parser.add_argument_group(", "= Path('P_DATA') / stfld elif args.phase in ['S', 'SKS', 'allS']: datapath = Path('S_DATA')", "Import modules and functions import numpy as np import pickle import stdb from", "will override any \" + \"station start times. [Default start date of station]\")", "_ #\") print(\"# _ __ / _|_ __ _ _ | |__ |", "range(len(rfRstream)): taxis = rfRstream[i].stats.taxis tselect = (taxis > t1) & (taxis < t2)", "\" + \"[Default False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str, default=None, help=\"Specify a list", "\"of positive values in stacks. [Default 'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\", default=False, help=\"Set", "2 \" + \"comma-separated floats\") ## JMG ## if args.phase not in ['P',", "[''] for il in range(0, len(tlocs)): if len(tlocs[il]) == 0: tlocs[il] = \"--\"", "folder names to use long-key form (NET.STN.CHN). \" + \"Default behaviour uses short", "sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s} |\".format( sta.network, sta.station)) print(\"| Channel: {0:2s}; Locations:", "end date of station]\") PreGroup = parser.add_argument_group( title='Pre-processing Settings', description=\"Options for pre-processing of", "dest=\"ovr\", default=False, help=\"Force the overwriting of pre-existing data. 
\" + \"[Default False]\") parser.add_argument(", "search interval for H (km). [Default 0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\", type=str, dest=\"kbound\", default=None,", "for val in args.kbound.split(',')] args.kbound = sorted(args.kbound) if (len(args.kbound)) != 2: parser.error( \"Error:", "< args.snr: continue if meta.cc < args.cc: continue ''' # Check bounds on", "len(args.stkeys) > 0: args.stkeys = args.stkeys.split(',') # construct start time if len(args.startT) >", "including without limitation the rights # to use, copy, modify, merge, publish, distribute,", "= ['S', 'SKS'] else: args.listphase = [args.phase] if args.typ not in ['sum', 'product']:", "list(stkeys): # Extract station information from dictionary sta = db[stkey] # Construct Folder", "'PP', 'allP', 'S', 'SKS' or 'allS'. \" + \"[Default 'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\",", "file is part of RfPy. # # Permission is hereby granted, free of", "|\".format( sta.channel, \",\".join(tlocs))) print(\"| Lon: {0:7.2f}; Lat: {1:6.2f} |\".format( sta.longitude, sta.latitude)) print(\"| Start", "savepath.is_dir(): print('Path to '+str(savepath)+' doesn`t exist - creating it') savepath.mkdir(parents=True) # Get search", "stations in \" + \"the IU network [Default processes all stations in the", "action=\"store\", type=str, dest=\"title\", default=\"\", help=\"Specify plot title [Default has no title]\") PlotGroup.add_argument( \"--format\",", "H (km). [Default 0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\", type=str, dest=\"kbound\", default=None, help=\"Specify a list", "if not datapath.is_dir(): print('Path to ' + str(datapath) + ' doesn`t exist -", "= rfRstream[i].stats.taxis tselect = (taxis > t1) & (taxis < t2) varR.append(np.var(rfRstream[i].data[tselect])) varR", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "(degrees). 
[Default [0, 360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\", default=False, help=\"Set this option to", "floats with minimum and maximum\" + \"bounds on back azimuth (degrees). [Default [0,", "= ArgumentParser( usage=\"%(prog)s [arguments] <station database>\", description=\"Script used to process receiver function data", "horizontal component SNR threshold for \" + \"extracting receiver functions. [Default None]\") PreGroup.add_argument(", "'SKS', 'allS']: datapath = Path('S_DATA') / stfld if not datapath.is_dir(): print('Path to '", "exist - creating it') savepath.mkdir(parents=True) # Get search start time if args.startT is", "stacks. [Default 'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\", default=False, help=\"Set this option to save", "val in args.hbound.split(',')] args.hbound = sorted(args.hbound) if (len(args.hbound)) != 2: parser.error( \"Error: --hbound", "if folder.name.startswith('.'): continue date = folder.name.split('_')[0] year = date[0:4] month = date[4:6] day", "args.bp = [0.05, 0.5] else: args.bp = [float(val) for val in args.bp.split(',')] args.bp", "slowness (s/km). [Default [0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\", action=\"store\", dest=\"bazbound\", type=str, default=None, help=\"Specify a", "\" + \"comma-separated floats\") ## JMG ## if args.slowbound is None: args.slowbound =", "from :class:`~optparse.OptionParser` objects. This function is used for data processing on-the-fly (requires web", "Load Database db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track processed folders procfold =", "\"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False, help=\"Force the overwriting of pre-existing data. 
\" +", "with refining \" + \"the times to include in searching for receiver function", "and functions import numpy as np import pickle import stdb from obspy.clients.fdsn import", "the Software is # furnished to do so, subject to the following conditions:", "subject to the following conditions: # # The above copyright notice and this", "\"comma-separated floats\") if args.hbound is None: args.hbound = [20., 50.] else: args.hbound =", "default=\"\", help=\"Specify a comma separated list of station keys for \" + \"which", "parameters for plotting the H-k stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\", default=False, help=\"Set this", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "args.strike is None or args.dip is None: parser.error(\"Specify both strike and dip for", "[float(val) for val in args.slowbound.split(',')] args.slowbound = sorted(args.slowbound) if (len(args.slowbound)) != 2: parser.error(", "\"--keys\", action=\"store\", type=str, dest=\"stkeys\", default=\"\", help=\"Specify a comma separated list of station keys", "start time if len(args.startT) > 0: try: args.startT = UTCDateTime(args.startT) except: parser.error( \"Cannot", "for val in args.slowbound.split(',')] args.slowbound = sorted(args.slowbound) if (len(args.slowbound)) != 2: parser.error( \"Error:", "#\") print(\"# | '__| |_| '_ \\| | | | | '_ \\|", "UTCDateTime compatible string representing \" + \"the end time for the search. This", "end time if args.endT is None: tend = sta.enddate else: tend = args.endT", "and maximum\" + \"bounds on slowness (s/km). [Default [0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\", action=\"store\",", "create station key list if len(args.stkeys) > 0: args.stkeys = args.stkeys.split(',') # construct", "\"be used to match against those in the dictionary. 
For \" + \"instance,", "both strike and dip for this type \" + \"of analysis\") else: args.calc_dip", "any \" + \"station end times [Default end date of station]\") PreGroup =", "time if len(args.endT) > 0: try: args.endT = UTCDateTime(args.endT) except: parser.error( \"Cannot construct", "hkstack = HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp) # Update attributes hkstack.hbound = args.hbound hkstack.kbound", "positive values in stacks. [Default 'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\", default=False, help=\"Set this", "folder.name.startswith('.'): continue date = folder.name.split('_')[0] year = date[0:4] month = date[4:6] day =", "help=\"Specify mean crustal Vp (km/s). [Default 6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\", type=float, dest=\"strike\", default=None,", "obspy.core import Stream, UTCDateTime from rfpy import binning, plotting, HkStack from pathlib import", "type=float, dest=\"dip\", default=None, help=\"Specify the dip of dipping Moho. [Default None]\") PlotGroup =", "consider. \" + \"[Default 40]\") PreGroup.add_argument( \"--snr\", action=\"store\", type=float, dest=\"snr\", default=-9999., help=\"Specify the", "dest=\"phase\", default='allP', help=\"Specify the phase name to plot. 
\" + \"Options are 'P',", "None: tend = sta.enddate else: tend = args.endT if tstart > sta.enddate or", "# continue ''' # If everything passed, load the RF data filename =", "+ \"comma-separated floats\") return args def main(): print() print(\"#########################################\") print(\"# __ _ _", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies", "not datapath.is_dir(): print('Path to ' + str(datapath) + ' doesn`t exist - continuing')", "[1.56, 2.1] else: args.kbound = [float(val) for val in args.kbound.split(',')] args.kbound = sorted(args.kbound)", "number of slowness bins to consider. \" + \"[Default 40]\") PreGroup.add_argument( \"--snr\", action=\"store\",", "default=None, help=\"Specify the corner frequencies for the bandpass filter. \" + \"[Default 0.05,0.5]\")", "len(rfRstream) == 0: continue if args.no_outl: t1 = 0. t2 = 30. varR", "function data \" + \"for H-k stacking.\") # General Settings parser.add_argument( \"indb\", help=\"Station", "outliers based on the MAD \" + \"on the variance. [Default False]\") PreGroup.add_argument(", "is hereby granted, free of charge, to any person obtaining a copy #", "the HkStack object to file. \" + \"[Default doesn't save]\") # Constants Settings", "args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path to see if it exists if", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE", "None: args.weights = [0.5, 2.0, -1.0] else: args.weights = [float(val) for val in", "|\".format( sta.longitude, sta.latitude)) print(\"| Start time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End time:", "is None: parser.error(\"Specify both strike and dip for this type \" + \"of", "parser.error( \"Cannot construct UTCDateTime from end time: \" + args.endT) else: args.endT =", "default=\"\", help=\"Specify plot title [Default has no title]\") PlotGroup.add_argument( \"--format\", action=\"store\", type=str, dest=\"form\",", "interval for k. [Default 0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\", type=str, dest=\"weights\", default=None, help=\"Specify a", "\"--hbound\", action=\"store\", type=str, dest=\"hbound\", default=None, help=\"Specify a list of two floats with minimum", "= Path('S_DATA') / stfld if not datapath.is_dir(): print('Path to ' + str(datapath) +", "rfRstream = Stream() datafiles = [x for x in datapath.iterdir() if x.is_dir()] for", "Constants Settings ModelGroup = parser.add_argument_group( title='Model Settings', description=\"Miscellaneous default values and settings\") ModelGroup.add_argument(", "stacking, etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\", type=str, dest=\"hbound\", default=None, help=\"Specify a list of two", "'product']: parser.error( \"Error: choose between 'sum' and 'product'\") if args.copy: if args.bp_copy is", "+ str(datapath) + ' doesn`t exist - continuing') continue # Define save path", "rfRstream.remove(tr) # Continue if stream is too short if len(rfRstream) < 5: continue", "action=\"store\", type=float, dest=\"snrh\", default=-9999, help=\"Specify the horizontal component SNR threshold for \" +", "PreGroup.add_argument( \"--phase\", action=\"store\", type=str, dest=\"phase\", default='allP', help=\"Specify the phase name to plot. \"", "tlocs = sta.location if len(tlocs) == 0: tlocs = [''] for il in", "the bandpass filter. 
\" + \"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\", type=int, default=36,", "and dateUTC < tend: # Load meta data metafile = folder / \"Meta_Data.pkl\"", "# QC Thresholding if meta.snrh < args.snrh: continue if meta.snr < args.snr: continue", "so, subject to the following conditions: # # The above copyright notice and", "to '+str(savepath)+' doesn`t exist - creating it') savepath.mkdir(parents=True) # Get search start time", "default=0.02, help=\"Specify search interval for k. [Default 0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\", type=str, dest=\"weights\",", "process receiver function data \" + \"for H-k stacking.\") # General Settings parser.add_argument(", "if (len(args.bp_copy)) != 2: parser.error( \"Error: --bp_copy should contain 2 \" + \"comma-separated", "# Try binning if specified if args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws)", "[Default doesn't save]\") PlotGroup.add_argument( \"--title\", action=\"store\", type=str, dest=\"title\", default=\"\", help=\"Specify plot title [Default", "action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set this option to delete outliers based on the MAD", "match against those in the dictionary. For \" + \"instance, providing IU will", "continuing') continue # Define save path if args.save: savepath = Path('HK_DATA') / stfld", "args.phase in ['P', 'PP', 'allP']: datapath = Path('P_DATA') / stfld elif args.phase in", "copy # of this software and associated documentation files (the \"Software\"), to deal", "ModelGroup.add_argument( \"--strike\", action=\"store\", type=float, dest=\"strike\", default=None, help=\"Specify the strike of dipping Moho. 
[Default", "between 'P', 'PP', 'allP', 'S', 'SKS' and 'allS'.\") if args.phase == 'allP': args.listphase", "sta.startdate: continue # Temporary print locations tlocs = sta.location if len(tlocs) == 0:", "= parser.parse_args(argv) # Check inputs if not exist(args.indb): parser.error(\"Input file \" + args.indb", "from dictionary sta = db[stkey] # Construct Folder Name stfld = stkey if", "'allS': args.listphase = ['S', 'SKS'] else: args.listphase = [args.phase] if args.typ not in", "if len(tlocs[il]) == 0: tlocs[il] = \"--\" sta.location = tlocs # Update Display", "of three floats with for Ps, Pps and Pass \" + \"weights in", "+ args.endT) else: args.endT = None if args.strike is None and args.dip is", "Name stfld = stkey if not args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path", "> 2.5] for i in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number of radial RF data:", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # Import", "sta.enddate else: tend = args.endT if tstart > sta.enddate or tend < sta.startdate:", "0.35]]\") HKGroup = parser.add_argument_group( title='Settings for H-k Stacking', description=\"Specify parameters of H-k search,", "in searching for receiver function data\") TimeGroup.add_argument( \"--start\", action=\"store\", type=str, dest=\"startT\", default=\"\", help=\"Specify", "\"matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']\") args = parser.parse_args(argv) # Check", "dest=\"binlim\", default=1, help=\"Specify the minimum number of RFs in each bin. [Default 3]\")", "\"bounds on search, weights, type of stacking, etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\", type=str, dest=\"hbound\",", "(km). 
[Default 0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\", type=str, dest=\"kbound\", default=None, help=\"Specify a list of", "args.cc: continue ''' # Check bounds on data # if meta.slow < args.slowbound[0]", "\" + \"a weighted average (using weights), or 'product' for the product \"", "minimum and maximum\" + \"bounds on Moho depth (H, in km). [Default [20.,", "hkstack.dh = args.dh hkstack.dk = args.dk hkstack.weights = args.weights # Stack with or", "= None elif args.strike is None or args.dip is None: parser.error(\"Specify both strike", "# construct start time if len(args.startT) > 0: try: args.startT = UTCDateTime(args.startT) except:", "'allP': args.listphase = ['P', 'PP'] elif args.phase == 'allS': args.listphase = ['S', 'SKS']", "THE # SOFTWARE. # Import modules and functions import numpy as np import", "\"contained within the station database. Partial keys will \" + \"be used to", "action=\"store\", dest=\"nslow\", type=int, default=40, help=\"Specify integer number of slowness bins to consider. \"", "'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\", default=False, help=\"Set this option to save the HkStack", "outliersR = np.arange(len(rfRstream))[robustR > 2.5] for i in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number of", "list of station keys for \" + \"which to perform the analysis. These", "sublicense, and/or sell # copies of the Software, and to permit persons to", "parser.add_argument_group( title='Settings for H-k Stacking', description=\"Specify parameters of H-k search, including\" + \"bounds", "Locations: {1:15s} |\".format( sta.channel, \",\".join(tlocs))) print(\"| Lon: {0:7.2f}; Lat: {1:6.2f} |\".format( sta.longitude, sta.latitude))", "action=\"store\", type=str, dest=\"typ\", default=\"sum\", help=\"Specify type of final stacking. 
Options are: 'sum' for", "plot [Default doesn't save]\") PlotGroup.add_argument( \"--title\", action=\"store\", type=str, dest=\"title\", default=\"\", help=\"Specify plot title", "0.35] else: args.bp_copy = [float(val) for val in args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy) if", "is part of RfPy. # # Permission is hereby granted, free of charge,", "'allS'. \" + \"[Default 'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\", default=False, help=\"Set this option", "action=\"store_true\", dest=\"ovr\", default=False, help=\"Force the overwriting of pre-existing data. \" + \"[Default False]\")", "[Default [20., 50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\", type=float, dest=\"dh\", default=0.5, help=\"Specify search interval for", "dest=\"vp\", default=6.0, help=\"Specify mean crustal Vp (km/s). [Default 6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\", type=float,", "and maximum\" + \"bounds on back azimuth (degrees). [Default [0, 360]]\") PreGroup.add_argument( \"--pws\",", "day = date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day) if dateUTC > tstart and dateUTC <", "'P', 'PP', 'allP', 'S', 'SKS' or 'allS'. \" + \"[Default 'allP']\") PreGroup.add_argument( \"--copy\",", "[0.05, 0.35] else: args.bp_copy = [float(val) for val in args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy)", "# copies of the Software, and to permit persons to whom the Software", "else: tend = args.endT if tstart > sta.enddate or tend < sta.startdate: continue", "long-key form (NET.STN.CHN). 
\" + \"Default behaviour uses short key form (NET.STN) for", "[Default [0.05, 0.35]]\") HKGroup = parser.add_argument_group( title='Settings for H-k Stacking', description=\"Specify parameters of", "typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] # Get a copy of the radial", "from.\", type=str) parser.add_argument( \"--keys\", action=\"store\", type=str, dest=\"stkeys\", default=\"\", help=\"Specify a comma separated list", "number of back-azimuth bins to consider. \" + \"[Default 36]\") PreGroup.add_argument( \"--nslow\", action=\"store\",", "TimeGroup = parser.add_argument_group( title=\"Time Settings\", description=\"Settings associated with refining \" + \"the times", "\"--kbound\", action=\"store\", type=str, dest=\"kbound\", default=None, help=\"Specify a list of two floats with minimum", "phases if meta.phase not in args.listphase: continue # QC Thresholding if meta.snrh <", "this permission notice shall be included in # all copies or substantial portions", "import exists as exist from numpy import nan def get_hk_arguments(argv=None): \"\"\" Get Options", "in ['P', 'PP', 'allP']: datapath = Path('P_DATA') / stfld elif args.phase in ['S',", "\"data prior to H-k stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\", type=float, dest=\"binlim\", default=1, help=\"Specify the", "parser.parse_args(argv) # Check inputs if not exist(args.indb): parser.error(\"Input file \" + args.indb +", "print(\"|-----------------------------------------------|\") # Check for folder already processed if stfld in procfold: print(' {0}", "to save the HkStack object to file. \" + \"[Default doesn't save]\") #", "\" + args.indb + \" does not exist\") # create station key list", "rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts != 1451: print(folder) if len(rfRstream) == 0: continue if", "HKGroup.add_argument( \"--type\", action=\"store\", type=str, dest=\"typ\", default=\"sum\", help=\"Specify type of final stacking. 
Options are:", "final stack. [Default [0.5, 2., -1.]]\") HKGroup.add_argument( \"--type\", action=\"store\", type=str, dest=\"typ\", default=\"sum\", help=\"Specify", "corners=2, zerophase=True) # Initialize the HkStack object try: hkstack = HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike,", "not in args.listphase: continue # QC Thresholding if meta.snrh < args.snrh: continue if", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #", "import pickle import stdb from obspy.clients.fdsn import Client from obspy.core import Stream, UTCDateTime", "is None: args.slowbound = [0.04, 0.08] else: args.slowbound = [float(val) for val in", "\" + \"[Default doesn't save]\") # Constants Settings ModelGroup = parser.add_argument_group( title='Model Settings',", "with minimum and maximum\" + \"frequency for the copies stream (Hz). [Default [0.05,", "args.binlim): rfRstream.remove(tr) # Continue if stream is too short if len(rfRstream) < 5:", "start date of station]\") TimeGroup.add_argument( \"--end\", action=\"store\", type=str, dest=\"endT\", default=\"\", help=\"Specify a UTCDateTime", "val in args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy) if (len(args.bp_copy)) != 2: parser.error( \"Error: --bp_copy", "those in the dictionary. For \" + \"instance, providing IU will match with", "two floats with minimum and maximum\" + \"bounds on Vp/Vs (k). [Default [1.56,", "args.endT if tstart > sta.enddate or tend < sta.startdate: continue # Temporary print", "if args.plot: hkstack.plot(args.save_plot, args.title, args.form) if args.save: filename = savepath / (hkstack.rfV1[0].stats.station +", "floats\") if args.weights is None: args.weights = [0.5, 2.0, -1.0] else: args.weights =", "'allS']: datapath = Path('S_DATA') / stfld if not datapath.is_dir(): print('Path to ' +", "threshold for \" + \"extracting receiver functions. 
[Default None]\") PreGroup.add_argument( \"--cc\", action=\"store\", type=float,", "pickle import stdb from obspy.clients.fdsn import Client from obspy.core import Stream, UTCDateTime from", "key type of the database.\" ) # Event Selection Criteria TimeGroup = parser.add_argument_group(", "continue ''' # Check bounds on data # if meta.slow < args.slowbound[0] and", "sorted(args.hbound) if (len(args.hbound)) != 2: parser.error( \"Error: --hbound should contain 2 \" +", "= date[4:6] day = date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day) if dateUTC > tstart and", "if args.phase not in ['P', 'PP', 'allP', 'S', 'SKS', 'allS']: parser.error( \"Error: choose", "/ stfld elif args.phase in ['S', 'SKS', 'allS']: datapath = Path('S_DATA') / stfld", "|_|_|\\_\\ #\") print(\"# |_| |___/_____| #\") print(\"# #\") print(\"#########################################\") print() # Run Input", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "+ \" does not exist\") # create station key list if len(args.stkeys) >", "= HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp) # Update attributes hkstack.hbound = args.hbound hkstack.kbound =", "merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to", "[Default 'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\", default=False, help=\"Set this option to save the", "args.slowbound.split(',')] args.slowbound = sorted(args.slowbound) if (len(args.slowbound)) != 2: parser.error( \"Error: --slowbound should contain", "option to delete outliers based on the MAD \" + \"on the variance.", "type=str, dest=\"startT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \" + \"the start", "+ \"[Default False]\") parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\", default=False, help=\"Force folder names to", "(NET.STN) for the folder \" + \"names, regardless of the key type of", "parser.error( \"Error: 
--weights should contain 3 \" + \"comma-separated floats\") return args def", "= [1.56, 2.1] else: args.kbound = [float(val) for val in args.kbound.split(',')] args.kbound =", "if dateUTC > tstart and dateUTC < tend: # Load meta data metafile", "\" + \"comma-separated floats\") if args.kbound is None: args.kbound = [1.56, 2.1] else:", "for extracting receiver functions. \" + \"[Default None]\") PreGroup.add_argument( \"--snrh\", action=\"store\", type=float, dest=\"snrh\",", "corners for the Pps and Pss phases. \" + \"[Default False]\") PreGroup.add_argument( \"--bp-copy\",", "# if meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]: # continue ''' #", "if args.no_outl: t1 = 0. t2 = 30. varR = [] for i", "object to file. \" + \"[Default doesn't save]\") # Constants Settings ModelGroup =", "in datapath.iterdir() if x.is_dir()] for folder in datafiles: # Skip hidden folders if", "results', description=\"Specify parameters for plotting the H-k stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\", default=False,", "on back azimuth (degrees). [Default [0, 360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\", default=False, help=\"Set", "'SKS'] else: args.listphase = [args.phase] if args.typ not in ['sum', 'product']: parser.error( \"Error:", "\"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str, default=None, help=\"Specify a list of two floats with minimum", "and filter if args.copy: rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True) #", "args.bp_copy = [0.05, 0.35] else: args.bp_copy = [float(val) for val in args.bp_copy.split(',')] args.bp_copy", "H-k stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\", default=False, help=\"Set this option to produce a", "whom the Software is # furnished to do so, subject to the following", "help=\"Specify format of figure. 
Can be any one of the valid\" + \"matplotlib", "- creating it') savepath.mkdir(parents=True) # Get search start time if args.startT is None:", "= sorted(args.kbound) if (len(args.kbound)) != 2: parser.error( \"Error: --kbound should contain 2 \"", "# Extract station information from dictionary sta = db[stkey] # Construct Folder Name", "UTCDateTime from end time: \" + args.endT) else: args.endT = None if args.strike", "30. varR = [] for i in range(len(rfRstream)): taxis = rfRstream[i].stats.taxis tselect =", "sorted(args.bp_copy) if (len(args.bp_copy)) != 2: parser.error( \"Error: --bp_copy should contain 2 \" +", "the HkStack object try: hkstack = HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike, dip=args.dip, vp=args.vp) except: hkstack", "\"--weights\", action=\"store\", type=str, dest=\"weights\", default=None, help=\"Specify a list of three floats with for", "copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software,", "if args.copy: if args.bp_copy is None: args.bp_copy = [0.05, 0.35] else: args.bp_copy =", "[Default 0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\", type=str, dest=\"kbound\", default=None, help=\"Specify a list of two", "dest=\"snr\", default=-9999., help=\"Specify the SNR threshold for extracting receiver functions. \" + \"[Default", "SNR threshold for \" + \"extracting receiver functions. [Default None]\") PreGroup.add_argument( \"--cc\", action=\"store\",", "Path('P_DATA') / stfld elif args.phase in ['S', 'SKS', 'allS']: datapath = Path('S_DATA') /", "this option to produce a plot of the stacks [Default \" + \"does", "Event Selection Criteria TimeGroup = parser.add_argument_group( title=\"Time Settings\", description=\"Settings associated with refining \"", "dip of dipping Moho. 
[Default None]\") PlotGroup = parser.add_argument_group( title='Settings for plotting results',", "start time: \" + args.startT) else: args.startT = None # construct end time", "{0} already processed...skipping '.format(stfld)) continue rfRstream = Stream() datafiles = [x for x", "args.bazbound[0] and meta.baz > args.bazbound[1]: # continue ''' # If everything passed, load", "is # furnished to do so, subject to the following conditions: # #", "default=0.5, help=\"Specify search interval for H (km). [Default 0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\", type=str,", "CC threshold for extracting receiver functions. \" + \"[Default None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\",", "within time range medvarR = np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR) outliersR", "action=\"store\", type=str, dest=\"bp\", default=None, help=\"Specify the corner frequencies for the bandpass filter. \"", "\\| |/ / #\") print(\"# | | | _| |_) | |_| |", "/ stfld if not datapath.is_dir(): print('Path to ' + str(datapath) + ' doesn`t", "a plot of the stacks [Default \" + \"does not produce plot]\") PlotGroup.add_argument(", "time: \" + args.startT) else: args.startT = None # construct end time if", "Pss phases. \" + \"[Default False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str, default=None, help=\"Specify", "compatible string representing \" + \"the end time for the search. This will", "Lon: {0:7.2f}; Lat: {1:6.2f} |\".format( sta.longitude, sta.latitude)) print(\"| Start time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d", "get_hk_arguments(argv=None): \"\"\" Get Options from :class:`~optparse.OptionParser` objects. This function is used for data", "RfPy. 
# # Permission is hereby granted, free of charge, to any person", "\") print(\" \") print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s} |\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station:", "choose between 'P', 'PP', 'allP', 'S', 'SKS' and 'allS'.\") if args.phase == 'allP':", "and meta.slow > args.slowbound[1]: # continue # if meta.baz < args.bazbound[0] and meta.baz", "rfpy import binning, plotting, HkStack from pathlib import Path from argparse import ArgumentParser", "= [0.05, 0.35] else: args.bp_copy = [float(val) for val in args.bp_copy.split(',')] args.bp_copy =", "is None: args.kbound = [1.56, 2.1] else: args.kbound = [float(val) for val in", "= [''] for il in range(0, len(tlocs)): if len(tlocs[il]) == 0: tlocs[il] =", "OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "for val in args.bazbound.split(',')] args.bazbound = sorted(args.bazbound) if (len(args.bazbound)) != 2: parser.error( \"Error:", "if len(tlocs) == 0: tlocs = [''] for il in range(0, len(tlocs)): if", "wrt variance within time range medvarR = np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR =", "\"--end\", action=\"store\", type=str, dest=\"endT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \" +", "rf_tmp = binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] # Get a copy", "if meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]: # continue ''' # If", "| | | < #\") print(\"# |_| |_| | .__/ \\__, |___|_| |_|_|\\_\\", "val in args.weights.split(',')] if (len(args.weights)) != 3: parser.error( \"Error: --weights should contain 3", "parser.add_argument_group( title='Settings for plotting results', description=\"Specify parameters for plotting the H-k stacks.\") 
PlotGroup.add_argument(", "dest=\"no_outl\", default=False, help=\"Set this option to delete outliers based on the MAD \"", "PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\", type=int, default=40, help=\"Specify integer number of slowness bins to", "else: args.kbound = [float(val) for val in args.kbound.split(',')] args.kbound = sorted(args.kbound) if (len(args.kbound))", "\"indb\", help=\"Station Database to process from.\", type=str) parser.add_argument( \"--keys\", action=\"store\", type=str, dest=\"stkeys\", default=\"\",", "= args.hbound hkstack.kbound = args.kbound hkstack.dh = args.dh hkstack.dk = args.dk hkstack.weights =", "\"--snr\", action=\"store\", type=float, dest=\"snr\", default=-9999., help=\"Specify the SNR threshold for extracting receiver functions.", "else: rf_tmp = binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] # Get a", "with minimum and maximum\" + \"bounds on Vp/Vs (k). [Default [1.56, 2.1]]\") HKGroup.add_argument(", "folder / \"RF_Data.pkl\" if filename.is_file(): file = open(filename, \"rb\") rfdata = pickle.load(file) rfRstream.append(rfdata[1])", "associated with refining \" + \"the times to include in searching for receiver", "use long-key form (NET.STN.CHN). \" + \"Default behaviour uses short key form (NET.STN)", "\" [Default False]\") PreGroup.add_argument( \"--phase\", action=\"store\", type=str, dest=\"phase\", default='allP', help=\"Specify the phase name", "web connection) \"\"\" parser = ArgumentParser( usage=\"%(prog)s [arguments] <station database>\", description=\"Script used to", "default='allP', help=\"Specify the phase name to plot. \" + \"Options are 'P', 'PP',", "'S', 'SKS' or 'allS'. 
\" + \"[Default 'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\", default=False,", "\" + \"comma-separated floats\") if args.weights is None: args.weights = [0.5, 2.0, -1.0]", "stfld in procfold: print(' {0} already processed...skipping '.format(stfld)) continue rfRstream = Stream() datafiles", "datapath.is_dir(): print('Path to ' + str(datapath) + ' doesn`t exist - continuing') continue", "sta.channel, \",\".join(tlocs))) print(\"| Lon: {0:7.2f}; Lat: {1:6.2f} |\".format( sta.longitude, sta.latitude)) print(\"| Start time:", "Temporary print locations tlocs = sta.location if len(tlocs) == 0: tlocs = ['']", "rfRstream: if (tr.stats.nbin < args.binlim): rfRstream.remove(tr) # Continue if stream is too short", "\" + \"of positive values in stacks. [Default 'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\",", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "{1:6.2f} |\".format( sta.longitude, sta.latitude)) print(\"| Start time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End", "against those in the dictionary. For \" + \"instance, providing IU will match", "stfld if not savepath.is_dir(): print('Path to '+str(savepath)+' doesn`t exist - creating it') savepath.mkdir(parents=True)", "- continuing') continue # Define save path if args.save: savepath = Path('HK_DATA') /", "\"the end time for the search. This will override any \" + \"station", "minimum number of RFs in each bin. 
[Default 3]\") PreGroup.add_argument( \"--bp\", action=\"store\", type=str,", "sta.latitude)) print(\"| Start time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End time: {0:19s} |\".format(", "will match with all stations in \" + \"the IU network [Default processes", "furnished to do so, subject to the following conditions: # # The above", "parser.add_argument( \"indb\", help=\"Station Database to process from.\", type=str) parser.add_argument( \"--keys\", action=\"store\", type=str, dest=\"stkeys\",", "dip for this type \" + \"of analysis\") else: args.calc_dip = True if", "\" + \"of analysis\") else: args.calc_dip = True if args.bp is None: args.bp", "km). [Default [20., 50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\", type=float, dest=\"dh\", default=0.5, help=\"Specify search interval", "= [] # Loop over station keys for stkey in list(stkeys): # Extract", "PreGroup.add_argument( \"--snr\", action=\"store\", type=float, dest=\"snr\", default=-9999., help=\"Specify the SNR threshold for extracting receiver", "#\") print(\"# |_| |___/_____| #\") print(\"# #\") print(\"#########################################\") print() # Run Input Parser", "|_| |___/_____| #\") print(\"# #\") print(\"#########################################\") print() # Run Input Parser args =", "help=\"Specify search interval for k. [Default 0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\", type=str, dest=\"weights\", default=None,", "title='Pre-processing Settings', description=\"Options for pre-processing of receiver function \" + \"data prior to", "PreGroup.add_argument( \"--cc\", action=\"store\", type=float, dest=\"cc\", default=-1., help=\"Specify the CC threshold for extracting receiver", "back azimuth (degrees). 
[Default [0, 360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\", default=False, help=\"Set this", "+ \"the IU network [Default processes all stations in the database]\") parser.add_argument( \"-v\",", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "[0, 360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\", default=False, help=\"Set this option to use phase-weighted", "if args.bp is None: args.bp = [0.05, 0.5] else: args.bp = [float(val) for", "action=\"store\", type=str, dest=\"endT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \" + \"the", "processed if stfld in procfold: print(' {0} already processed...skipping '.format(stfld)) continue rfRstream =", "[float(val) for val in args.weights.split(',')] if (len(args.weights)) != 3: parser.error( \"Error: --weights should", "# Copyright 2019 <NAME> # # This file is part of RfPy. #", "HKGroup.add_argument( \"--dh\", action=\"store\", type=float, dest=\"dh\", default=0.5, help=\"Specify search interval for H (km). [Default", "if meta.cc < args.cc: continue ''' # Check bounds on data # if", "\"Meta_Data.pkl\" if not metafile.is_file(): continue meta = pickle.load(open(metafile, 'rb')) # Skip data not", "be included in # all copies or substantial portions of the Software. 
#", "+ \"Default behaviour uses short key form (NET.STN) for the folder \" +", "+ \"for H-k stacking.\") # General Settings parser.add_argument( \"indb\", help=\"Station Database to process", "dest=\"weights\", default=None, help=\"Specify a list of three floats with for Ps, Pps and", "two floats with minimum and maximum\" + \"bounds on Moho depth (H, in", "\",\".join(tlocs))) print(\"| Lon: {0:7.2f}; Lat: {1:6.2f} |\".format( sta.longitude, sta.latitude)) print(\"| Start time: {0:19s}", "PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set this option to save the plot [Default", "UTCDateTime from rfpy import binning, plotting, HkStack from pathlib import Path from argparse", "pickle.load(file) rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts != 1451: print(folder) if len(rfRstream) == 0: continue", "the radial component and filter if args.copy: rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1],", "= folder.name.split('_')[0] year = date[0:4] month = date[4:6] day = date[6:8] dateUTC =", "rfRstream = rf_tmp[0] # Get a copy of the radial component and filter", "in each bin. 
[Default 3]\") PreGroup.add_argument( \"--bp\", action=\"store\", type=str, dest=\"bp\", default=None, help=\"Specify the", "+ \"comma-separated floats\") ## JMG ## if args.phase not in ['P', 'PP', 'allP',", "from argparse import ArgumentParser from os.path import exists as exist from numpy import", "stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\", type=float, dest=\"binlim\", default=1, help=\"Specify the minimum number of RFs", "construct end time if len(args.endT) > 0: try: args.endT = UTCDateTime(args.endT) except: parser.error(", "Update processed folders procfold.append(stfld) if __name__ == \"__main__\": # Run main program main()", "# Skip data not in list of phases if meta.phase not in args.listphase:", "Filter original stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True) # Initialize the HkStack object", "for this type \" + \"of analysis\") else: args.calc_dip = True if args.bp", "['P', 'PP'] elif args.phase == 'allS': args.listphase = ['S', 'SKS'] else: args.listphase =", "default=1, help=\"Specify the minimum number of RFs in each bin. [Default 3]\") PreGroup.add_argument(", "= get_hk_arguments() # Load Database db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track processed", "Ps, Pps and Pass \" + \"weights in final stack. [Default [0.5, 2.,", "+ \"instance, providing IU will match with all stations in \" + \"the", "dip if args.calc_dip: hkstack.stack_dip() else: hkstack.stack() # Average stacks hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot,", "receiver functions. 
\" + \"[Default None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set this", "PlotGroup.add_argument( \"--title\", action=\"store\", type=str, dest=\"title\", default=\"\", help=\"Specify plot title [Default has no title]\")", "if args.calc_dip: hkstack.stack_dip() else: hkstack.stack() # Average stacks hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot, args.title,", "time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\")", "floats with for Ps, Pps and Pass \" + \"weights in final stack.", "for the folder \" + \"names, regardless of the key type of the", "from obspy.core import Stream, UTCDateTime from rfpy import binning, plotting, HkStack from pathlib", "End time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check for folder already processed", "filename = folder / \"RF_Data.pkl\" if filename.is_file(): file = open(filename, \"rb\") rfdata =", "!= 2: parser.error( \"Error: --bp_copy should contain 2 \" + \"comma-separated floats\") if", "= [0.05, 0.5] else: args.bp = [float(val) for val in args.bp.split(',')] args.bp =", "Vp/Vs (k). 
[Default [1.56, 2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\", type=float, dest=\"dk\", default=0.02, help=\"Specify search", "= True if args.bp is None: args.bp = [0.05, 0.5] else: args.bp =", "TimeGroup.add_argument( \"--start\", action=\"store\", type=str, dest=\"startT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \"", "is None: args.weights = [0.5, 2.0, -1.0] else: args.weights = [float(val) for val", "+ \"comma-separated floats\") if args.kbound is None: args.kbound = [1.56, 2.1] else: args.kbound", "and meta.baz > args.bazbound[1]: # continue ''' # If everything passed, load the", "a list of two floats with minimum and maximum\" + \"bounds on Moho", "of radial RF bins: \" + str(len(rfRstream))) print('') # Filter original stream rfRstream.filter('bandpass',", "keys for \" + \"which to perform the analysis. These must be \"", "doesn`t exist - continuing') continue # Define save path if args.save: savepath =", "' doesn`t exist - continuing') continue # Define save path if args.save: savepath", "if meta.snrh < args.snrh: continue if meta.snr < args.snr: continue if meta.cc <", "key form (NET.STN) for the folder \" + \"names, regardless of the key", "len(args.endT) > 0: try: args.endT = UTCDateTime(args.endT) except: parser.error( \"Cannot construct UTCDateTime from", "0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\", type=str, dest=\"weights\", default=None, help=\"Specify a list of three floats", "| _| |_) | |_| | | | | | < #\") print(\"#", "(km/s). [Default 6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\", type=float, dest=\"strike\", default=None, help=\"Specify the strike of", "counts: for tr in rfRstream: if (tr.stats.nbin < args.binlim): rfRstream.remove(tr) # Continue if", "# Check for folder already processed if stfld in procfold: print(' {0} already", "= [0.5, 2.0, -1.0] else: args.weights = [float(val) for val in args.weights.split(',')] if", "variance. 
[Default False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\", type=str, default=None, help=\"Specify a list of", "option to use phase-weighted stacking during binning \" + \" [Default False]\") PreGroup.add_argument(", "| | | | '_ \\| |/ / #\") print(\"# | | |", "rfdata[0].stats.npts != 1451: print(folder) if len(rfRstream) == 0: continue if args.no_outl: t1 =", "default=\"\", help=\"Specify a UTCDateTime compatible string representing \" + \"the start time for", "of figure. Can be any one of the valid\" + \"matplotlib formats: 'png',", "= parser.add_argument_group( title='Pre-processing Settings', description=\"Options for pre-processing of receiver function \" + \"data", "#\") print(\"# |_| |_| | .__/ \\__, |___|_| |_|_|\\_\\ #\") print(\"# |_| |___/_____|", "the overwriting of pre-existing data. \" + \"[Default False]\") parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\",", "\" + \"filtered at different corners for the Pps and Pss phases. \"", "\" + \"station start times. 
[Default start date of station]\") TimeGroup.add_argument( \"--end\", action=\"store\",", "help=\"Specify a list of three floats with for Ps, Pps and Pass \"", "# General Settings parser.add_argument( \"indb\", help=\"Station Database to process from.\", type=str) parser.add_argument( \"--keys\",", "are: 'sum' for \" + \"a weighted average (using weights), or 'product' for", "--bazbound should contain 2 \" + \"comma-separated floats\") ## JMG ## if args.phase", "\"Error: --bazbound should contain 2 \" + \"comma-separated floats\") ## JMG ## if", "PreGroup.add_argument( \"--snrh\", action=\"store\", type=float, dest=\"snrh\", default=-9999, help=\"Specify the horizontal component SNR threshold for", "PreGroup.add_argument( \"--binlim\", action=\"store\", type=float, dest=\"binlim\", default=1, help=\"Specify the minimum number of RFs in", "meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]: # continue ''' # If everything", "title]\") PlotGroup.add_argument( \"--format\", action=\"store\", type=str, dest=\"form\", default=\"png\", help=\"Specify format of figure. Can be", "the phase name to plot. 
\" + \"Options are 'P', 'PP', 'allP', 'S',", "None]\") PlotGroup = parser.add_argument_group( title='Settings for plotting results', description=\"Specify parameters for plotting the", "= False args.nbaz = None elif args.strike is None or args.dip is None:", "copyright notice and this permission notice shall be included in # all copies", "time if args.endT is None: tend = sta.enddate else: tend = args.endT if", "Database to process from.\", type=str) parser.add_argument( \"--keys\", action=\"store\", type=str, dest=\"stkeys\", default=\"\", help=\"Specify a", "continue # Temporary print locations tlocs = sta.location if len(tlocs) == 0: tlocs", "\" + \"[Default False]\") parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\", default=False, help=\"Force folder names", "deal # in the Software without restriction, including without limitation the rights #", "or 'allS'. \" + \"[Default 'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\", default=False, help=\"Set this", "3: parser.error( \"Error: --weights should contain 3 \" + \"comma-separated floats\") return args", "[0.0, 360.0] else: args.bazbound = [float(val) for val in args.bazbound.split(',')] args.bazbound = sorted(args.bazbound)", "title='Settings for plotting results', description=\"Specify parameters for plotting the H-k stacks.\") PlotGroup.add_argument( \"--plot\",", "+ \"bounds on back azimuth (degrees). 
[Default [0, 360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\",", "This function is used for data processing on-the-fly (requires web connection) \"\"\" parser", "# Stack with or without dip if args.calc_dip: hkstack.stack_dip() else: hkstack.stack() # Average", "is None: args.bazbound = [0.0, 360.0] else: args.bazbound = [float(val) for val in", "--kbound should contain 2 \" + \"comma-separated floats\") if args.weights is None: args.weights", "(len(args.kbound)) != 2: parser.error( \"Error: --kbound should contain 2 \" + \"comma-separated floats\")", "floats\") if args.hbound is None: args.hbound = [20., 50.] else: args.hbound = [float(val)", "main(): print() print(\"#########################################\") print(\"# __ _ _ #\") print(\"# _ __ / _|_", "bin counts: for tr in rfRstream: if (tr.stats.nbin < args.binlim): rfRstream.remove(tr) # Continue", "import Client from obspy.core import Stream, UTCDateTime from rfpy import binning, plotting, HkStack", "search interval for k. [Default 0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\", type=str, dest=\"weights\", default=None, help=\"Specify", "the search. This will override any \" + \"station start times. [Default start", "time if len(args.startT) > 0: try: args.startT = UTCDateTime(args.startT) except: parser.error( \"Cannot construct", "None]\") ModelGroup.add_argument( \"--dip\", action=\"store\", type=float, dest=\"dip\", default=None, help=\"Specify the dip of dipping Moho.", "search end time if args.endT is None: tend = sta.enddate else: tend =", "# Filter original stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True) # Initialize the HkStack", "None if args.strike is None and args.dip is None: args.calc_dip = False args.nbaz", "zerophase=True) # Initialize the HkStack object try: hkstack = HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike, dip=args.dip,", "USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# Import modules and functions", "list of two floats with minimum and maximum\" + \"bounds on slowness (s/km).", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "to whom the Software is # furnished to do so, subject to the", "def main(): print() print(\"#########################################\") print(\"# __ _ _ #\") print(\"# _ __ /", "import Stream, UTCDateTime from rfpy import binning, plotting, HkStack from pathlib import Path", "in the dictionary. For \" + \"instance, providing IU will match with all", "hkstack.weights = args.weights # Stack with or without dip if args.calc_dip: hkstack.stack_dip() else:", "args.startT is None: tstart = sta.startdate else: tstart = args.startT # Get search", "in args.bazbound.split(',')] args.bazbound = sorted(args.bazbound) if (len(args.bazbound)) != 2: parser.error( \"Error: --bazbound should", "| | '_ \\| |/ / #\") print(\"# | | | _| |_)", "action=\"store\", type=float, dest=\"binlim\", default=1, help=\"Specify the minimum number of RFs in each bin.", "\" + \"[Default None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set this option to", "+ \"comma-separated floats\") if args.bazbound is None: args.bazbound = [0.0, 360.0] else: args.bazbound", "\"comma-separated floats\") ## JMG ## if args.phase not in ['P', 'PP', 'allP', 'S',", "passed, load the RF data filename = folder / \"RF_Data.pkl\" if filename.is_file(): file", "\"Error: --hbound should contain 2 \" + \"comma-separated floats\") if args.kbound is None:", "< args.binlim): rfRstream.remove(tr) # Continue if stream is too short if len(rfRstream) <", "the copies stream (Hz). 
[Default [0.05, 0.35]]\") HKGroup = parser.add_argument_group( title='Settings for H-k", "if args.strike is None and args.dip is None: args.calc_dip = False args.nbaz =", "receiver function data\") TimeGroup.add_argument( \"--start\", action=\"store\", type=str, dest=\"startT\", default=\"\", help=\"Specify a UTCDateTime compatible", "rfV2=rfRstream_copy, strike=args.strike, dip=args.dip, vp=args.vp) except: hkstack = HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp) # Update", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "\"on the variance. [Default False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\", type=str, default=None, help=\"Specify a", "print('Path to '+str(savepath)+' doesn`t exist - creating it') savepath.mkdir(parents=True) # Get search start", "\"Error: choose between 'P', 'PP', 'allP', 'S', 'SKS' and 'allS'.\") if args.phase ==", "+ \"extracting receiver functions. [Default None]\") PreGroup.add_argument( \"--cc\", action=\"store\", type=float, dest=\"cc\", default=-1., help=\"Specify", "'pdf'. [Default 'png']\") args = parser.parse_args(argv) # Check inputs if not exist(args.indb): parser.error(\"Input", "| | _| |_) | |_| | | | | | < #\")", "This file is part of RfPy. 
# # Permission is hereby granted, free", "hkstack.stack_dip() else: hkstack.stack() # Average stacks hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot, args.title, args.form) if", "weighted average (using weights), or 'product' for the product \" + \"of positive", "print(\"| Lon: {0:7.2f}; Lat: {1:6.2f} |\".format( sta.longitude, sta.latitude)) print(\"| Start time: {0:19s} |\".format(", "and 'allS'.\") if args.phase == 'allP': args.listphase = ['P', 'PP'] elif args.phase ==", "to increase verbosity.\") parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False, help=\"Force the overwriting of", "print(\"|===============================================|\") print(\"| {0:>8s} |\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s} |\".format( sta.network, sta.station))", "of back-azimuth bins to consider. \" + \"[Default 36]\") PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\",", "dip=args.dip, vp=args.vp) # Update attributes hkstack.hbound = args.hbound hkstack.kbound = args.kbound hkstack.dh =", "|_| | | | | | < #\") print(\"# |_| |_| | .__/", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of radial RF bins: \" + str(len(rfRstream))) print('')", "args.copy: if args.bp_copy is None: args.bp_copy = [0.05, 0.35] else: args.bp_copy = [float(val)", "#\") print(\"# #\") print(\"#########################################\") print() # Run Input Parser args = get_hk_arguments() #", "option to save the HkStack object to file. 
\" + \"[Default doesn't save]\")", "sell # copies of the Software, and to permit persons to whom the", "\"RF_Data.pkl\" if filename.is_file(): file = open(filename, \"rb\") rfdata = pickle.load(file) rfRstream.append(rfdata[1]) file.close() if", "start times. [Default start date of station]\") TimeGroup.add_argument( \"--end\", action=\"store\", type=str, dest=\"endT\", default=\"\",", "azimuth (degrees). [Default [0, 360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\", default=False, help=\"Set this option", "\"station end times [Default end date of station]\") PreGroup = parser.add_argument_group( title='Pre-processing Settings',", "/ \"RF_Data.pkl\" if filename.is_file(): file = open(filename, \"rb\") rfdata = pickle.load(file) rfRstream.append(rfdata[1]) file.close()", "# all copies or substantial portions of the Software. # # THE SOFTWARE", "continue # if meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]: # continue '''", "to include in searching for receiver function data\") TimeGroup.add_argument( \"--start\", action=\"store\", type=str, dest=\"startT\",", "original stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True) # Initialize the HkStack object try:", "for stkey in list(stkeys): # Extract station information from dictionary sta = db[stkey]", "k. [Default 0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\", type=str, dest=\"weights\", default=None, help=\"Specify a list of", "args.bazbound = [float(val) for val in args.bazbound.split(',')] args.bazbound = sorted(args.bazbound) if (len(args.bazbound)) !=", "all copies or substantial portions of the Software. # # THE SOFTWARE IS", "with minimum and maximum\" + \"bounds on back azimuth (degrees). 
[Default [0, 360]]\")", "(requires web connection) \"\"\" parser = ArgumentParser( usage=\"%(prog)s [arguments] <station database>\", description=\"Script used", "dest=\"dip\", default=None, help=\"Specify the dip of dipping Moho. [Default None]\") PlotGroup = parser.add_argument_group(", "<NAME> # # This file is part of RfPy. # # Permission is", "this option to save the HkStack object to file. \" + \"[Default doesn't", "help=\"Force folder names to use long-key form (NET.STN.CHN). \" + \"Default behaviour uses", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "two floats with minimum and maximum\" + \"bounds on back azimuth (degrees). [Default", "on search, weights, type of stacking, etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\", type=str, dest=\"hbound\", default=None,", "\"of analysis\") else: args.calc_dip = True if args.bp is None: args.bp = [0.05,", "dest=\"lkey\", default=False, help=\"Force folder names to use long-key form (NET.STN.CHN). \" + \"Default", "action=\"store_true\", dest=\"copy\", default=False, help=\"Set this option to use a copy of the radial", "# SOFTWARE. # Import modules and functions import numpy as np import pickle", "form (NET.STN.CHN). \" + \"Default behaviour uses short key form (NET.STN) for the", "of radial RF data: \" + str(len(rfRstream))) print('') # Try binning if specified", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "print(\"# __ _ _ #\") print(\"# _ __ / _|_ __ _ _", "print(\"| Station: {0:>2s}.{1:5s} |\".format( sta.network, sta.station)) print(\"| Channel: {0:2s}; Locations: {1:15s} |\".format( sta.channel,", "type=float, dest=\"vp\", default=6.0, help=\"Specify mean crustal Vp (km/s). 
[Default 6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\",", "files (the \"Software\"), to deal # in the Software without restriction, including without", "binning if specified if args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream =", "print('') # Filter original stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True) # Initialize the", "None: args.bp_copy = [0.05, 0.35] else: args.bp_copy = [float(val) for val in args.bp_copy.split(',')]", "or tend < sta.startdate: continue # Temporary print locations tlocs = sta.location if", "['S', 'SKS'] else: args.listphase = [args.phase] if args.typ not in ['sum', 'product']: parser.error(", "\"comma-separated floats\") ## JMG ## if args.slowbound is None: args.slowbound = [0.04, 0.08]", "receiver function data \" + \"for H-k stacking.\") # General Settings parser.add_argument( \"indb\",", "get_hk_arguments() # Load Database db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track processed folders", "component and filter if args.copy: rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True)", "meta.baz > args.bazbound[1]: # continue ''' # If everything passed, load the RF", "> tstart and dateUTC < tend: # Load meta data metafile = folder", "'jpg', 'eps', 'pdf'. 
[Default 'png']\") args = parser.parse_args(argv) # Check inputs if not", "__ _ _ #\") print(\"# _ __ / _|_ __ _ _ |", "following conditions: # # The above copyright notice and this permission notice shall", "|\".format( sta.network, sta.station)) print(\"| Channel: {0:2s}; Locations: {1:15s} |\".format( sta.channel, \",\".join(tlocs))) print(\"| Lon:", "of station]\") TimeGroup.add_argument( \"--end\", action=\"store\", type=str, dest=\"endT\", default=\"\", help=\"Specify a UTCDateTime compatible string", "[Default 0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\", type=str, dest=\"weights\", default=None, help=\"Specify a list of three", "of the radial component and filter if args.copy: rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0],", "a comma separated list of station keys for \" + \"which to perform", "final stacking. Options are: 'sum' for \" + \"a weighted average (using weights),", "2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\", type=float, dest=\"dk\", default=0.02, help=\"Specify search interval for k. 
[Default", "(len(args.bazbound)) != 2: parser.error( \"Error: --bazbound should contain 2 \" + \"comma-separated floats\")", "The above copyright notice and this permission notice shall be included in #", "sta.location = tlocs # Update Display print(\" \") print(\" \") print(\"|===============================================|\") print(\"|===============================================|\") print(\"|", "dip=args.dip, vp=args.vp) except: hkstack = HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp) # Update attributes hkstack.hbound", "= stkey if not args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path to see", "hidden folders if folder.name.startswith('.'): continue date = folder.name.split('_')[0] year = date[0:4] month =", "False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str, default=None, help=\"Specify a list of two floats", "_ __ / _|_ __ _ _ | |__ | | __ #\")", "Copyright 2019 <NAME> # # This file is part of RfPy. # #", "action=\"store\", type=float, dest=\"vp\", default=6.0, help=\"Specify mean crustal Vp (km/s). [Default 6.0]\") ModelGroup.add_argument( \"--strike\",", "the folder \" + \"names, regardless of the key type of the database.\"", "if filename.is_file(): file = open(filename, \"rb\") rfdata = pickle.load(file) rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts", "outliers wrt variance within time range medvarR = np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "\"[Default None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set this option to delete outliers", "\" + \"which to perform the analysis. 
These must be \" + \"contained", "help=\"Specify a list of two floats with minimum and maximum\" + \"frequency for", "functions import numpy as np import pickle import stdb from obspy.clients.fdsn import Client", "sorted(args.slowbound) if (len(args.slowbound)) != 2: parser.error( \"Error: --slowbound should contain 2 \" +", "parser.error( \"Error: --bazbound should contain 2 \" + \"comma-separated floats\") ## JMG ##", "len(tlocs[il]) == 0: tlocs[il] = \"--\" sta.location = tlocs # Update Display print(\"", "folder \" + \"names, regardless of the key type of the database.\" )", "information from dictionary sta = db[stkey] # Construct Folder Name stfld = stkey", "args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy) if (len(args.bp_copy)) != 2: parser.error( \"Error: --bp_copy should contain", "2.5] for i in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number of radial RF data: \"", "meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]: # continue # if meta.baz <", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "stacking.\") # General Settings parser.add_argument( \"indb\", help=\"Station Database to process from.\", type=str) parser.add_argument(", "is None: args.bp_copy = [0.05, 0.35] else: args.bp_copy = [float(val) for val in", "integer number of back-azimuth bins to consider. \" + \"[Default 36]\") PreGroup.add_argument( \"--nslow\",", "str(len(rfRstream))) print('') # Try binning if specified if args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1,", "help=\"Specify the dip of dipping Moho. [Default None]\") PlotGroup = parser.add_argument_group( title='Settings for", "maximum\" + \"bounds on slowness (s/km). 
[Default [0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\", action=\"store\", dest=\"bazbound\",", "the database]\") parser.add_argument( \"-v\", \"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\", default=False, help=\"Specify to increase verbosity.\")", "dest=\"kbound\", default=None, help=\"Specify a list of two floats with minimum and maximum\" +", "settings\") ModelGroup.add_argument( \"--vp\", action=\"store\", type=float, dest=\"vp\", default=6.0, help=\"Specify mean crustal Vp (km/s). [Default", "Construct Folder Name stfld = stkey if not args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] #", "< sta.startdate: continue # Temporary print locations tlocs = sta.location if len(tlocs) ==", "for i in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number of radial RF data: \" +", "month = date[4:6] day = date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day) if dateUTC > tstart", "[Default \" + \"does not produce plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set", "= pickle.load(open(metafile, 'rb')) # Skip data not in list of phases if meta.phase", "args.save_plot and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of radial RF bins: \" +", "notice shall be included in # all copies or substantial portions of the", "val in args.bazbound.split(',')] args.bazbound = sorted(args.bazbound) if (len(args.bazbound)) != 2: parser.error( \"Error: --bazbound", "help=\"Specify type of final stacking. Options are: 'sum' for \" + \"a weighted", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "number of RFs in each bin. 
[Default 3]\") PreGroup.add_argument( \"--bp\", action=\"store\", type=str, dest=\"bp\",", "50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\", type=float, dest=\"dh\", default=0.5, help=\"Specify search interval for H (km).", "= date[0:4] month = date[4:6] day = date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day) if dateUTC", "= [float(val) for val in args.bazbound.split(',')] args.bazbound = sorted(args.bazbound) if (len(args.bazbound)) != 2:", "default=36, help=\"Specify integer number of back-azimuth bins to consider. \" + \"[Default 36]\")", "year = date[0:4] month = date[4:6] day = date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day) if", "date of station]\") TimeGroup.add_argument( \"--end\", action=\"store\", type=str, dest=\"endT\", default=\"\", help=\"Specify a UTCDateTime compatible", "False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\", type=str, default=None, help=\"Specify a list of two floats", "\"for H-k stacking.\") # General Settings parser.add_argument( \"indb\", help=\"Station Database to process from.\",", "functions. \" + \"[Default None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set this option", "print(\"# |_| |___/_____| #\") print(\"# #\") print(\"#########################################\") print() # Run Input Parser args", "| | | | < #\") print(\"# |_| |_| | .__/ \\__, |___|_|", "file. \" + \"[Default doesn't save]\") # Constants Settings ModelGroup = parser.add_argument_group( title='Model", "args.kbound = [float(val) for val in args.kbound.split(',')] args.kbound = sorted(args.kbound) if (len(args.kbound)) !=", "args.endT is None: tend = sta.enddate else: tend = args.endT if tstart >", "tstart = args.startT # Get search end time if args.endT is None: tend", "default=False, help=\"Set this option to save the plot [Default doesn't save]\") PlotGroup.add_argument( \"--title\",", "+ \"on the variance. 
[Default False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\", type=str, default=None, help=\"Specify", "etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\", type=str, dest=\"hbound\", default=None, help=\"Specify a list of two floats", "H-k stacking.\") # General Settings parser.add_argument( \"indb\", help=\"Station Database to process from.\", type=str)", ".__/ \\__, |___|_| |_|_|\\_\\ #\") print(\"# |_| |___/_____| #\") print(\"# #\") print(\"#########################################\") print()", "construct UTCDateTime from start time: \" + args.startT) else: args.startT = None #", "!= 2: parser.error( \"Error: --kbound should contain 2 \" + \"comma-separated floats\") if", "receiver function \" + \"data prior to H-k stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\", type=float,", "floats\") ## JMG ## if args.phase not in ['P', 'PP', 'allP', 'S', 'SKS',", "args.hbound = [20., 50.] else: args.hbound = [float(val) for val in args.hbound.split(',')] args.hbound", "+ str(len(rfRstream))) print('') # Filter original stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True) #", "\"the IU network [Default processes all stations in the database]\") parser.add_argument( \"-v\", \"-V\",", "help=\"Specify search interval for H (km). [Default 0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\", type=str, dest=\"kbound\",", "2 \" + \"comma-separated floats\") if args.weights is None: args.weights = [0.5, 2.0,", "This will override any \" + \"station start times. [Default start date of", "of the radial component \" + \"filtered at different corners for the Pps", "= sorted(args.hbound) if (len(args.hbound)) != 2: parser.error( \"Error: --hbound should contain 2 \"", "'SKS' or 'allS'. \" + \"[Default 'allP']\") PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\", default=False, help=\"Set", "type of final stacking. 
Options are: 'sum' for \" + \"a weighted average", "| | __ #\") print(\"# | '__| |_| '_ \\| | | |", "obspy.clients.fdsn import Client from obspy.core import Stream, UTCDateTime from rfpy import binning, plotting,", "PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\", type=str, default=None, help=\"Specify a list of two floats with", "= 0. t2 = 30. varR = [] for i in range(len(rfRstream)): taxis", "this type \" + \"of analysis\") else: args.calc_dip = True if args.bp is", "= np.arange(len(rfRstream))[robustR > 2.5] for i in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number of radial", "print('') print(\"Number of radial RF bins: \" + str(len(rfRstream))) print('') # Filter original", "a copy of the radial component and filter if args.copy: rfRstream_copy = rfRstream.copy()", "floats with minimum and maximum\" + \"frequency for the copies stream (Hz). [Default", "else: args.hbound = [float(val) for val in args.hbound.split(',')] args.hbound = sorted(args.hbound) if (len(args.hbound))", "uses short key form (NET.STN) for the folder \" + \"names, regardless of", "default=None, help=\"Specify the strike of dipping Moho. 
[Default None]\") ModelGroup.add_argument( \"--dip\", action=\"store\", type=float,", "floats\") ## JMG ## if args.slowbound is None: args.slowbound = [0.04, 0.08] else:", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "a list of two floats with minimum and maximum\" + \"frequency for the", "# Average stacks hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot, args.title, args.form) if args.save: filename =", "[0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\", action=\"store\", dest=\"bazbound\", type=str, default=None, help=\"Specify a list of two", "# Get a copy of the radial component and filter if args.copy: rfRstream_copy", "args.typ not in ['sum', 'product']: parser.error( \"Error: choose between 'sum' and 'product'\") if", "exist(args.indb): parser.error(\"Input file \" + args.indb + \" does not exist\") # create", "\" does not exist\") # create station key list if len(args.stkeys) > 0:", "dipping Moho. [Default None]\") ModelGroup.add_argument( \"--dip\", action=\"store\", type=float, dest=\"dip\", default=None, help=\"Specify the dip", "If everything passed, load the RF data filename = folder / \"RF_Data.pkl\" if", "function data\") TimeGroup.add_argument( \"--start\", action=\"store\", type=str, dest=\"startT\", default=\"\", help=\"Specify a UTCDateTime compatible string", "rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True) # Check bin counts: for tr in", "default=False, help=\"Set this option to delete outliers based on the MAD \" +", "a UTCDateTime compatible string representing \" + \"the end time for the search.", "default values and settings\") ModelGroup.add_argument( \"--vp\", action=\"store\", type=float, dest=\"vp\", default=6.0, help=\"Specify mean crustal", "IU network [Default processes all stations in the database]\") parser.add_argument( \"-v\", \"-V\", \"--verbose\",", "0.08]]\") PreGroup.add_argument( 
\"--bazbound\", action=\"store\", dest=\"bazbound\", type=str, default=None, help=\"Specify a list of two floats", "is None or args.dip is None: parser.error(\"Specify both strike and dip for this", "without dip if args.calc_dip: hkstack.stack_dip() else: hkstack.stack() # Average stacks hkstack.average(typ=args.typ) if args.plot:", "Parser args = get_hk_arguments() # Load Database db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys) #", "\" + \"Default behaviour uses short key form (NET.STN) for the folder \"", "Path('S_DATA') / stfld if not datapath.is_dir(): print('Path to ' + str(datapath) + '", "it exists if args.phase in ['P', 'PP', 'allP']: datapath = Path('P_DATA') / stfld", "to the following conditions: # # The above copyright notice and this permission", "-1.0] else: args.weights = [float(val) for val in args.weights.split(',')] if (len(args.weights)) != 3:", "'sum' and 'product'\") if args.copy: if args.bp_copy is None: args.bp_copy = [0.05, 0.35]", "bounds on data # if meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]: #", "in \" + \"the IU network [Default processes all stations in the database]\")", "else: args.endT = None if args.strike is None and args.dip is None: args.calc_dip", "\" + \"extracting receiver functions. 
[Default None]\") PreGroup.add_argument( \"--cc\", action=\"store\", type=float, dest=\"cc\", default=-1.,", "args.calc_dip: hkstack.stack_dip() else: hkstack.stack() # Average stacks hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot, args.title, args.form)", "tend: # Load meta data metafile = folder / \"Meta_Data.pkl\" if not metafile.is_file():", "2 \" + \"comma-separated floats\") if args.kbound is None: args.kbound = [1.56, 2.1]", "providing IU will match with all stations in \" + \"the IU network", "\"--plot\", action=\"store_true\", dest=\"plot\", default=False, help=\"Set this option to produce a plot of the", "dest=\"nbaz\", type=int, default=36, help=\"Specify integer number of back-azimuth bins to consider. \" +", "\"bounds on back azimuth (degrees). [Default [0, 360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\", default=False,", "+ args.indb + \" does not exist\") # create station key list if", "default=False, help=\"Force the overwriting of pre-existing data. \" + \"[Default False]\") parser.add_argument( \"-L\",", "\"--cc\", action=\"store\", type=float, dest=\"cc\", default=-1., help=\"Specify the CC threshold for extracting receiver functions.", "for \" + \"a weighted average (using weights), or 'product' for the product", "_|_ __ _ _ | |__ | | __ #\") print(\"# | '__|", "locations tlocs = sta.location if len(tlocs) == 0: tlocs = [''] for il", "values and settings\") ModelGroup.add_argument( \"--vp\", action=\"store\", type=float, dest=\"vp\", default=6.0, help=\"Specify mean crustal Vp", "dest=\"snrh\", default=-9999, help=\"Specify the horizontal component SNR threshold for \" + \"extracting receiver", "action=\"store\", type=str, dest=\"phase\", default='allP', help=\"Specify the phase name to plot. 
\" + \"Options", "Software is # furnished to do so, subject to the following conditions: #", "action=\"store_true\", dest=\"verb\", default=False, help=\"Specify to increase verbosity.\") parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False,", "of two floats with minimum and maximum\" + \"bounds on Moho depth (H,", "args.slowbound = [float(val) for val in args.slowbound.split(',')] args.slowbound = sorted(args.slowbound) if (len(args.slowbound)) !=", "'rb')) # Skip data not in list of phases if meta.phase not in", "part of RfPy. # # Permission is hereby granted, free of charge, to", "''' # If everything passed, load the RF data filename = folder /", "to process receiver function data \" + \"for H-k stacking.\") # General Settings", "on-the-fly (requires web connection) \"\"\" parser = ArgumentParser( usage=\"%(prog)s [arguments] <station database>\", description=\"Script", "HKGroup.add_argument( \"--kbound\", action=\"store\", type=str, dest=\"kbound\", default=None, help=\"Specify a list of two floats with", "il in range(0, len(tlocs)): if len(tlocs[il]) == 0: tlocs[il] = \"--\" sta.location =", "print() # Run Input Parser args = get_hk_arguments() # Load Database db, stkeys", "\"--dip\", action=\"store\", type=float, dest=\"dip\", default=None, help=\"Specify the dip of dipping Moho. [Default None]\")", "val in args.kbound.split(',')] args.kbound = sorted(args.kbound) if (len(args.kbound)) != 2: parser.error( \"Error: --kbound", "len(tlocs) == 0: tlocs = [''] for il in range(0, len(tlocs)): if len(tlocs[il])", "bandpass filter. 
\" + \"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\", type=int, default=36, help=\"Specify", "args.bazbound[1]: # continue ''' # If everything passed, load the RF data filename", "of the stacks [Default \" + \"does not produce plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\",", "= [float(val) for val in args.slowbound.split(',')] args.slowbound = sorted(args.slowbound) if (len(args.slowbound)) != 2:", "the analysis. These must be \" + \"contained within the station database. Partial", "numpy as np import pickle import stdb from obspy.clients.fdsn import Client from obspy.core", "compatible string representing \" + \"the start time for the search. This will", "Check for folder already processed if stfld in procfold: print(' {0} already processed...skipping", "args.phase not in ['P', 'PP', 'allP', 'S', 'SKS', 'allS']: parser.error( \"Error: choose between", "\"Error: --bp_copy should contain 2 \" + \"comma-separated floats\") if args.hbound is None:", "minimum and maximum\" + \"bounds on Vp/Vs (k). [Default [1.56, 2.1]]\") HKGroup.add_argument( \"--dk\",", "[Default [1.56, 2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\", type=float, dest=\"dk\", default=0.02, help=\"Specify search interval for", "+ \"[Default None]\") PreGroup.add_argument( \"--snrh\", action=\"store\", type=float, dest=\"snrh\", default=-9999, help=\"Specify the horizontal component", "DEALINGS IN THE # SOFTWARE. 
# Import modules and functions import numpy as", "folders if folder.name.startswith('.'): continue date = folder.name.split('_')[0] year = date[0:4] month = date[4:6]", "and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of radial RF bins: \" + str(len(rfRstream)))", "if args.weights is None: args.weights = [0.5, 2.0, -1.0] else: args.weights = [float(val)", "already processed if stfld in procfold: print(' {0} already processed...skipping '.format(stfld)) continue rfRstream", "folder in datafiles: # Skip hidden folders if folder.name.startswith('.'): continue date = folder.name.split('_')[0]", "PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\", type=int, default=36, help=\"Specify integer number of back-azimuth bins to", "rfRstream.remove(rfRstream[i]) print('') print(\"Number of radial RF data: \" + str(len(rfRstream))) print('') # Try", "inputs if not exist(args.indb): parser.error(\"Input file \" + args.indb + \" does not", "help=\"Specify the horizontal component SNR threshold for \" + \"extracting receiver functions. [Default", "rf_tmp[0] else: rf_tmp = binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] # Get", "\" + str(len(rfRstream))) print('') # Filter original stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True)", "if (len(args.weights)) != 3: parser.error( \"Error: --weights should contain 3 \" + \"comma-separated", "analysis. These must be \" + \"contained within the station database. Partial keys", "np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR > 2.5] for", "extracting receiver functions. 
\" + \"[Default None]\") PreGroup.add_argument( \"--snrh\", action=\"store\", type=float, dest=\"snrh\", default=-9999,", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "continue date = folder.name.split('_')[0] year = date[0:4] month = date[4:6] day = date[6:8]", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "Get Options from :class:`~optparse.OptionParser` objects. This function is used for data processing on-the-fly", "Get search start time if args.startT is None: tstart = sta.startdate else: tstart", "args.phase in ['S', 'SKS', 'allS']: datapath = Path('S_DATA') / stfld if not datapath.is_dir():", "+ \"names, regardless of the key type of the database.\" ) # Event", "[Default 6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\", type=float, dest=\"strike\", default=None, help=\"Specify the strike of dipping", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the", "--weights should contain 3 \" + \"comma-separated floats\") return args def main(): print()", "the following conditions: # # The above copyright notice and this permission notice", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "'__| |_| '_ \\| | | | | '_ \\| |/ / #\")", "rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "for Ps, Pps and Pass \" + \"weights in final stack. [Default [0.5,", "Stack with or without dip if args.calc_dip: hkstack.stack_dip() else: hkstack.stack() # Average stacks", "\"--nslow\", action=\"store\", dest=\"nslow\", type=int, default=40, help=\"Specify integer number of slowness bins to consider.", "to match against those in the dictionary. 
For \" + \"instance, providing IU", "args.hbound = sorted(args.hbound) if (len(args.hbound)) != 2: parser.error( \"Error: --hbound should contain 2", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "# Get search start time if args.startT is None: tstart = sta.startdate else:", "type of the database.\" ) # Event Selection Criteria TimeGroup = parser.add_argument_group( title=\"Time", "if args.endT is None: tend = sta.enddate else: tend = args.endT if tstart", "print(\"# |_| |_| | .__/ \\__, |___|_| |_|_|\\_\\ #\") print(\"# |_| |___/_____| #\")", "or without dip if args.calc_dip: hkstack.stack_dip() else: hkstack.stack() # Average stacks hkstack.average(typ=args.typ) if", "\"bounds on slowness (s/km). [Default [0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\", action=\"store\", dest=\"bazbound\", type=str, default=None,", "|_| | .__/ \\__, |___|_| |_|_|\\_\\ #\") print(\"# |_| |___/_____| #\") print(\"# #\")", "Moho depth (H, in km). [Default [20., 50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\", type=float, dest=\"dh\",", "None: tstart = sta.startdate else: tstart = args.startT # Get search end time", "for the Pps and Pss phases. \" + \"[Default False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\",", "to do so, subject to the following conditions: # # The above copyright", "\" + \"instance, providing IU will match with all stations in \" +", "the dictionary. 
For \" + \"instance, providing IU will match with all stations", "specified if args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] else:", "freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True) # Check bin counts: for tr in rfRstream: if", "0.08] else: args.slowbound = [float(val) for val in args.slowbound.split(',')] args.slowbound = sorted(args.slowbound) if", "(len(args.hbound)) != 2: parser.error( \"Error: --hbound should contain 2 \" + \"comma-separated floats\")", "of two floats with minimum and maximum\" + \"bounds on back azimuth (degrees).", "!= 1451: print(folder) if len(rfRstream) == 0: continue if args.no_outl: t1 = 0.", "for H-k Stacking', description=\"Specify parameters of H-k search, including\" + \"bounds on search,", "except: parser.error( \"Cannot construct UTCDateTime from start time: \" + args.startT) else: args.startT", "dest=\"plot\", default=False, help=\"Set this option to produce a plot of the stacks [Default", "= sta.startdate else: tstart = args.startT # Get search end time if args.endT", "list of three floats with for Ps, Pps and Pass \" + \"weights", "from start time: \" + args.startT) else: args.startT = None # construct end", "# Run Input Parser args = get_hk_arguments() # Load Database db, stkeys =", "## JMG ## if args.phase not in ['P', 'PP', 'allP', 'S', 'SKS', 'allS']:", "prior to H-k stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\", type=float, dest=\"binlim\", default=1, help=\"Specify the minimum", "tselect = (taxis > t1) & (taxis < t2) varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR)", "if args.slowbound is None: args.slowbound = [0.04, 0.08] else: args.slowbound = [float(val) for", "valid\" + \"matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']\") args = parser.parse_args(argv)", "and Pss phases. 
\" + \"[Default False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str, default=None,", "[Default [0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\", action=\"store\", dest=\"bazbound\", type=str, default=None, help=\"Specify a list of", "ModelGroup.add_argument( \"--dip\", action=\"store\", type=float, dest=\"dip\", default=None, help=\"Specify the dip of dipping Moho. [Default", "import stdb from obspy.clients.fdsn import Client from obspy.core import Stream, UTCDateTime from rfpy", "savepath / (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update processed folders procfold.append(stfld) if __name__", "dest=\"typ\", default=\"sum\", help=\"Specify type of final stacking. Options are: 'sum' for \" +", "val in args.slowbound.split(',')] args.slowbound = sorted(args.slowbound) if (len(args.slowbound)) != 2: parser.error( \"Error: --slowbound", "without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense,", "processed...skipping '.format(stfld)) continue rfRstream = Stream() datafiles = [x for x in datapath.iterdir()", "\"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set this option to save the plot [Default doesn't", "default=6.0, help=\"Specify mean crustal Vp (km/s). 
[Default 6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\", type=float, dest=\"strike\",", "\"rb\") rfdata = pickle.load(file) rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts != 1451: print(folder) if len(rfRstream)", "> args.bazbound[1]: # continue ''' # If everything passed, load the RF data", "2: parser.error( \"Error: --hbound should contain 2 \" + \"comma-separated floats\") if args.kbound", "args.weights # Stack with or without dip if args.calc_dip: hkstack.stack_dip() else: hkstack.stack() #", "args.slowbound = [0.04, 0.08] else: args.slowbound = [float(val) for val in args.slowbound.split(',')] args.slowbound", "else: args.slowbound = [float(val) for val in args.slowbound.split(',')] args.slowbound = sorted(args.slowbound) if (len(args.slowbound))", "'png', 'jpg', 'eps', 'pdf'. [Default 'png']\") args = parser.parse_args(argv) # Check inputs if", "permission notice shall be included in # all copies or substantial portions of", "for il in range(0, len(tlocs)): if len(tlocs[il]) == 0: tlocs[il] = \"--\" sta.location", "madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR > 2.5] for i", "copy of the radial component and filter if args.copy: rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass',", "len(args.startT) > 0: try: args.startT = UTCDateTime(args.startT) except: parser.error( \"Cannot construct UTCDateTime from", "[Default has no title]\") PlotGroup.add_argument( \"--format\", action=\"store\", type=str, dest=\"form\", default=\"png\", help=\"Specify format of", "attributes hkstack.hbound = args.hbound hkstack.kbound = args.kbound hkstack.dh = args.dh hkstack.dk = args.dk", "Start time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\")))", "argparse import ArgumentParser from os.path import exists as exist from numpy import 
nan", "Stacking', description=\"Specify parameters of H-k search, including\" + \"bounds on search, weights, type", "tend = sta.enddate else: tend = args.endT if tstart > sta.enddate or tend", "'allS']: parser.error( \"Error: choose between 'P', 'PP', 'allP', 'S', 'SKS' and 'allS'.\") if", "keys will \" + \"be used to match against those in the dictionary.", "'PP'] elif args.phase == 'allS': args.listphase = ['S', 'SKS'] else: args.listphase = [args.phase]", "'allP', 'S', 'SKS', 'allS']: parser.error( \"Error: choose between 'P', 'PP', 'allP', 'S', 'SKS'", "'P', 'PP', 'allP', 'S', 'SKS' and 'allS'.\") if args.phase == 'allP': args.listphase =", "\"Error: --bp should contain 2 \" + \"comma-separated floats\") ## JMG ## if", "action=\"store\", type=str, dest=\"stkeys\", default=\"\", help=\"Specify a comma separated list of station keys for", "in procfold: print(' {0} already processed...skipping '.format(stfld)) continue rfRstream = Stream() datafiles =", "one of the valid\" + \"matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']\")", "to ' + str(datapath) + ' doesn`t exist - continuing') continue # Define", "the MAD \" + \"on the variance. [Default False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\",", "stfld = stkey if not args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path to", "if not args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path to see if it", "= [x for x in datapath.iterdir() if x.is_dir()] for folder in datafiles: #", "Vp (km/s). [Default 6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\", type=float, dest=\"strike\", default=None, help=\"Specify the strike", "should contain 2 \" + \"comma-separated floats\") if args.bazbound is None: args.bazbound =", "stacking. 
Options are: 'sum' for \" + \"a weighted average (using weights), or", "> 0: args.stkeys = args.stkeys.split(',') # construct start time if len(args.startT) > 0:", "help=\"Set this option to use a copy of the radial component \" +", "= rf_tmp[0] # Get a copy of the radial component and filter if", "exists as exist from numpy import nan def get_hk_arguments(argv=None): \"\"\" Get Options from", "= [args.phase] if args.typ not in ['sum', 'product']: parser.error( \"Error: choose between 'sum'", "_ _ #\") print(\"# _ __ / _|_ __ _ _ | |__", "freqmax=args.bp[1], corners=2, zerophase=True) # Initialize the HkStack object try: hkstack = HkStack(rfRstream, rfV2=rfRstream_copy,", "\"weights in final stack. [Default [0.5, 2., -1.]]\") HKGroup.add_argument( \"--type\", action=\"store\", type=str, dest=\"typ\",", "tlocs # Update Display print(\" \") print(\" \") print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s} |\".format(", "shall be included in # all copies or substantial portions of the Software.", "creating it') savepath.mkdir(parents=True) # Get search start time if args.startT is None: tstart", "tstart = sta.startdate else: tstart = args.startT # Get search end time if", "\" + \"comma-separated floats\") return args def main(): print() print(\"#########################################\") print(\"# __ _", "print(\" \") print(\" \") print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s} |\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"|", "type=str, dest=\"weights\", default=None, help=\"Specify a list of three floats with for Ps, Pps", "+ \"bounds on Vp/Vs (k). [Default [1.56, 2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\", type=float, dest=\"dk\",", "plot. 
\" + \"Options are 'P', 'PP', 'allP', 'S', 'SKS' or 'allS'. \"", "to see if it exists if args.phase in ['P', 'PP', 'allP']: datapath =", "= sta.enddate else: tend = args.endT if tstart > sta.enddate or tend <", "component \" + \"filtered at different corners for the Pps and Pss phases.", "plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set this option to save the plot", "args.hbound is None: args.hbound = [20., 50.] else: args.hbound = [float(val) for val", "= pickle.load(file) rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts != 1451: print(folder) if len(rfRstream) == 0:", "= parser.add_argument_group( title='Settings for plotting results', description=\"Specify parameters for plotting the H-k stacks.\")", "# Initialize the HkStack object try: hkstack = HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike, dip=args.dip, vp=args.vp)", "numpy import nan def get_hk_arguments(argv=None): \"\"\" Get Options from :class:`~optparse.OptionParser` objects. 
This function", "Check inputs if not exist(args.indb): parser.error(\"Input file \" + args.indb + \" does", "modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "[float(val) for val in args.bp.split(',')] args.bp = sorted(args.bp) if (len(args.bp)) != 2: parser.error(", "|_) | |_| | | | | | < #\") print(\"# |_| |_|", "meta data metafile = folder / \"Meta_Data.pkl\" if not metafile.is_file(): continue meta =", "print(\"| End time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check for folder already", "for data processing on-the-fly (requires web connection) \"\"\" parser = ArgumentParser( usage=\"%(prog)s [arguments]", "[Default False]\") PreGroup.add_argument( \"--phase\", action=\"store\", type=str, dest=\"phase\", default='allP', help=\"Specify the phase name to", "tstart and dateUTC < tend: # Load meta data metafile = folder /", "--hbound should contain 2 \" + \"comma-separated floats\") if args.kbound is None: args.kbound", "+ \"comma-separated floats\") if args.weights is None: args.weights = [0.5, 2.0, -1.0] else:", "Thresholding if meta.snrh < args.snrh: continue if meta.snr < args.snr: continue if meta.cc", "publish, distribute, sublicense, and/or sell # copies of the Software, and to permit", "= [0.0, 360.0] else: args.bazbound = [float(val) for val in args.bazbound.split(',')] args.bazbound =", "print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s} |\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s} |\".format( sta.network,", "Pps and Pass \" + \"weights in final stack. 
[Default [0.5, 2., -1.]]\")", "type=str, dest=\"stkeys\", default=\"\", help=\"Specify a comma separated list of station keys for \"", "for i in range(len(rfRstream)): taxis = rfRstream[i].stats.taxis tselect = (taxis > t1) &", "# Track processed folders procfold = [] # Loop over station keys for", "False]\") PreGroup.add_argument( \"--phase\", action=\"store\", type=str, dest=\"phase\", default='allP', help=\"Specify the phase name to plot.", "+ \"of analysis\") else: args.calc_dip = True if args.bp is None: args.bp =", "args.startT # Get search end time if args.endT is None: tend = sta.enddate", "str(len(rfRstream))) print('') # Filter original stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True) # Initialize", "and maximum\" + \"frequency for the copies stream (Hz). [Default [0.05, 0.35]]\") HKGroup", "if args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] else: rf_tmp", "None]\") PreGroup.add_argument( \"--snrh\", action=\"store\", type=float, dest=\"snrh\", default=-9999, help=\"Specify the horizontal component SNR threshold", "RFs in each bin. [Default 3]\") PreGroup.add_argument( \"--bp\", action=\"store\", type=str, dest=\"bp\", default=None, help=\"Specify", "dest=\"slowbound\", type=str, default=None, help=\"Specify a list of two floats with minimum and maximum\"", "is None: args.calc_dip = False args.nbaz = None elif args.strike is None or", "== 0: tlocs[il] = \"--\" sta.location = tlocs # Update Display print(\" \")", "continue if meta.snr < args.snr: continue if meta.cc < args.cc: continue ''' #", "None: args.calc_dip = False args.nbaz = None elif args.strike is None or args.dip", "help=\"Specify the minimum number of RFs in each bin. 
[Default 3]\") PreGroup.add_argument( \"--bp\",", "binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] # Get a copy of the", "representing \" + \"the end time for the search. This will override any", "default=\"sum\", help=\"Specify type of final stacking. Options are: 'sum' for \" + \"a", "dest=\"bp_copy\", type=str, default=None, help=\"Specify a list of two floats with minimum and maximum\"", "if (tr.stats.nbin < args.binlim): rfRstream.remove(tr) # Continue if stream is too short if", "\"does not produce plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set this option to", "process from.\", type=str) parser.add_argument( \"--keys\", action=\"store\", type=str, dest=\"stkeys\", default=\"\", help=\"Specify a comma separated", "type=float, dest=\"snrh\", default=-9999, help=\"Specify the horizontal component SNR threshold for \" + \"extracting", "in args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy) if (len(args.bp_copy)) != 2: parser.error( \"Error: --bp_copy should", "is None and args.dip is None: args.calc_dip = False args.nbaz = None elif", "50.] else: args.hbound = [float(val) for val in args.hbound.split(',')] args.hbound = sorted(args.hbound) if", "## if args.phase not in ['P', 'PP', 'allP', 'S', 'SKS', 'allS']: parser.error( \"Error:", "Skip hidden folders if folder.name.startswith('.'): continue date = folder.name.split('_')[0] year = date[0:4] month", "Initialize the HkStack object try: hkstack = HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike, dip=args.dip, vp=args.vp) except:", "\"extracting receiver functions. 
[Default None]\") PreGroup.add_argument( \"--cc\", action=\"store\", type=float, dest=\"cc\", default=-1., help=\"Specify the", "[Default None]\") ModelGroup.add_argument( \"--dip\", action=\"store\", type=float, dest=\"dip\", default=None, help=\"Specify the dip of dipping", "{1:15s} |\".format( sta.channel, \",\".join(tlocs))) print(\"| Lon: {0:7.2f}; Lat: {1:6.2f} |\".format( sta.longitude, sta.latitude)) print(\"|", "for val in args.weights.split(',')] if (len(args.weights)) != 3: parser.error( \"Error: --weights should contain", "if not savepath.is_dir(): print('Path to '+str(savepath)+' doesn`t exist - creating it') savepath.mkdir(parents=True) #", "H-k Stacking', description=\"Specify parameters of H-k search, including\" + \"bounds on search, weights,", "args.endT = None if args.strike is None and args.dip is None: args.calc_dip =", "args.listphase: continue # QC Thresholding if meta.snrh < args.snrh: continue if meta.snr <", "to use phase-weighted stacking during binning \" + \" [Default False]\") PreGroup.add_argument( \"--phase\",", "processes all stations in the database]\") parser.add_argument( \"-v\", \"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\", default=False,", "for H (km). 
[Default 0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\", type=str, dest=\"kbound\", default=None, help=\"Specify a", "args.title, args.form) if args.save: filename = savepath / (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) #", "exist - continuing') continue # Define save path if args.save: savepath = Path('HK_DATA')", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "if args.save: savepath = Path('HK_DATA') / stfld if not savepath.is_dir(): print('Path to '+str(savepath)+'", "for val in args.bp.split(',')] args.bp = sorted(args.bp) if (len(args.bp)) != 2: parser.error( \"Error:", "on data # if meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]: # continue", "search, including\" + \"bounds on search, weights, type of stacking, etc.\") HKGroup.add_argument( \"--hbound\",", "if args.bp_copy is None: args.bp_copy = [0.05, 0.35] else: args.bp_copy = [float(val) for", ":class:`~optparse.OptionParser` objects. This function is used for data processing on-the-fly (requires web connection)", "action=\"store\", type=float, dest=\"snr\", default=-9999., help=\"Specify the SNR threshold for extracting receiver functions. 
\"", "a list of two floats with minimum and maximum\" + \"bounds on back", "args.phase == 'allP': args.listphase = ['P', 'PP'] elif args.phase == 'allS': args.listphase =", "'sum' for \" + \"a weighted average (using weights), or 'product' for the", "tlocs[il] = \"--\" sta.location = tlocs # Update Display print(\" \") print(\" \")", "folder already processed if stfld in procfold: print(' {0} already processed...skipping '.format(stfld)) continue", "save]\") PlotGroup.add_argument( \"--title\", action=\"store\", type=str, dest=\"title\", default=\"\", help=\"Specify plot title [Default has no", "variance within time range medvarR = np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR)", "be any one of the valid\" + \"matplotlib formats: 'png', 'jpg', 'eps', 'pdf'.", "if meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]: # continue # if meta.baz", "stkey if not args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path to see if", "help=\"Station Database to process from.\", type=str) parser.add_argument( \"--keys\", action=\"store\", type=str, dest=\"stkeys\", default=\"\", help=\"Specify", "[] for i in range(len(rfRstream)): taxis = rfRstream[i].stats.taxis tselect = (taxis > t1)", "\"comma-separated floats\") if args.weights is None: args.weights = [0.5, 2.0, -1.0] else: args.weights", "Define path to see if it exists if args.phase in ['P', 'PP', 'allP']:", "\"comma-separated floats\") if args.kbound is None: args.kbound = [1.56, 2.1] else: args.kbound =", "# in the Software without restriction, including without limitation the rights # to", "meta.snr < args.snr: continue if meta.cc < args.cc: continue ''' # Check bounds", "of phases if meta.phase not in args.listphase: continue # QC Thresholding if meta.snrh", "= binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] # Get a copy of", "HkStack(rfRstream, rfV2=rfRstream_copy, 
strike=args.strike, dip=args.dip, vp=args.vp) except: hkstack = HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp) #", "usage=\"%(prog)s [arguments] <station database>\", description=\"Script used to process receiver function data \" +", "'png']\") args = parser.parse_args(argv) # Check inputs if not exist(args.indb): parser.error(\"Input file \"", "crustal Vp (km/s). [Default 6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\", type=float, dest=\"strike\", default=None, help=\"Specify the", "Lat: {1:6.2f} |\".format( sta.longitude, sta.latitude)) print(\"| Start time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|", "\"frequency for the copies stream (Hz). [Default [0.05, 0.35]]\") HKGroup = parser.add_argument_group( title='Settings", "args.endT) else: args.endT = None if args.strike is None and args.dip is None:", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "= tlocs # Update Display print(\" \") print(\" \") print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s}", "action=\"store_true\", dest=\"plot\", default=False, help=\"Set this option to produce a plot of the stacks", "H-k stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\", type=float, dest=\"binlim\", default=1, help=\"Specify the minimum number of", "action=\"store\", type=str, dest=\"startT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \" + \"the", "parser.add_argument( \"-v\", \"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\", default=False, help=\"Specify to increase verbosity.\") parser.add_argument( \"-O\",", "+ \"data prior to H-k stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\", type=float, dest=\"binlim\", default=1, help=\"Specify", "radial RF bins: \" + str(len(rfRstream))) print('') # Filter original stream rfRstream.filter('bandpass', 
freqmin=args.bp[0],", "rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True) # Initialize the HkStack object try: hkstack =", "else: args.calc_dip = True if args.bp is None: args.bp = [0.05, 0.5] else:", "time range medvarR = np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR) outliersR =", "| |_| | | | | | < #\") print(\"# |_| |_| |", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "None]\") PreGroup.add_argument( \"--cc\", action=\"store\", type=float, dest=\"cc\", default=-1., help=\"Specify the CC threshold for extracting", "2: parser.error( \"Error: --bp_copy should contain 2 \" + \"comma-separated floats\") if args.hbound", "# Constants Settings ModelGroup = parser.add_argument_group( title='Model Settings', description=\"Miscellaneous default values and settings\")", "of receiver function \" + \"data prior to H-k stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\",", "two floats with minimum and maximum\" + \"frequency for the copies stream (Hz).", "Moho. [Default None]\") ModelGroup.add_argument( \"--dip\", action=\"store\", type=float, dest=\"dip\", default=None, help=\"Specify the dip of", "{0:7.2f}; Lat: {1:6.2f} |\".format( sta.longitude, sta.latitude)) print(\"| Start time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\")))", "continue ''' # If everything passed, load the RF data filename = folder", "# Temporary print locations tlocs = sta.location if len(tlocs) == 0: tlocs =", "nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] else: rf_tmp = binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws)", "start time for the search. This will override any \" + \"station start", "component SNR threshold for \" + \"extracting receiver functions. 
[Default None]\") PreGroup.add_argument( \"--cc\",", "Try binning if specified if args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream", "print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s} |\".format( sta.network, sta.station)) print(\"| Channel: {0:2s}; Locations: {1:15s} |\".format(", "save the HkStack object to file. \" + \"[Default doesn't save]\") # Constants", "medvarR = np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR >", "None elif args.strike is None or args.dip is None: parser.error(\"Specify both strike and", "in args.slowbound.split(',')] args.slowbound = sorted(args.slowbound) if (len(args.slowbound)) != 2: parser.error( \"Error: --slowbound should", "or args.dip is None: parser.error(\"Specify both strike and dip for this type \"", "the CC threshold for extracting receiver functions. \" + \"[Default None]\") PreGroup.add_argument( \"--no-outlier\",", "\"--strike\", action=\"store\", type=float, dest=\"strike\", default=None, help=\"Specify the strike of dipping Moho. [Default None]\")", "(H, in km). [Default [20., 50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\", type=float, dest=\"dh\", default=0.5, help=\"Specify", "to use long-key form (NET.STN.CHN). \" + \"Default behaviour uses short key form", "Skip data not in list of phases if meta.phase not in args.listphase: continue", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO", "data\") TimeGroup.add_argument( \"--start\", action=\"store\", type=str, dest=\"startT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing", "try: args.endT = UTCDateTime(args.endT) except: parser.error( \"Cannot construct UTCDateTime from end time: \"", "everything passed, load the RF data filename = folder / \"RF_Data.pkl\" if filename.is_file():", "= 30. varR = [] for i in range(len(rfRstream)): taxis = rfRstream[i].stats.taxis tselect", "\" + \"comma-separated floats\") ## JMG ## if args.phase not in ['P', 'PP',", "data filename = folder / \"RF_Data.pkl\" if filename.is_file(): file = open(filename, \"rb\") rfdata", "for the search. This will override any \" + \"station end times [Default", "if (len(args.slowbound)) != 2: parser.error( \"Error: --slowbound should contain 2 \" + \"comma-separated", "sta.network, sta.station)) print(\"| Channel: {0:2s}; Locations: {1:15s} |\".format( sta.channel, \",\".join(tlocs))) print(\"| Lon: {0:7.2f};", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "# Get search end time if args.endT is None: tend = sta.enddate else:", "\"--phase\", action=\"store\", type=str, dest=\"phase\", default='allP', help=\"Specify the phase name to plot. \" +", "a list of two floats with minimum and maximum\" + \"bounds on slowness", "Stream() datafiles = [x for x in datapath.iterdir() if x.is_dir()] for folder in", "0. t2 = 30. 
varR = [] for i in range(len(rfRstream)): taxis =", "dest=\"bazbound\", type=str, default=None, help=\"Specify a list of two floats with minimum and maximum\"", "\"--pws\", action=\"store_true\", dest=\"pws\", default=False, help=\"Set this option to use phase-weighted stacking during binning", "list of two floats with minimum and maximum\" + \"bounds on Moho depth", "parser.error( \"Error: --bp_copy should contain 2 \" + \"comma-separated floats\") if args.hbound is", "{0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") #", "|\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check for folder already processed if stfld in", "2: parser.error( \"Error: --kbound should contain 2 \" + \"comma-separated floats\") if args.weights", "Options from :class:`~optparse.OptionParser` objects. This function is used for data processing on-the-fly (requires", "the H-k stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\", default=False, help=\"Set this option to produce", "key list if len(args.stkeys) > 0: args.stkeys = args.stkeys.split(',') # construct start time", "copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED", "will \" + \"be used to match against those in the dictionary. For", "different corners for the Pps and Pss phases. \" + \"[Default False]\") PreGroup.add_argument(", "'_ \\| |/ / #\") print(\"# | | | _| |_) | |_|", "len(tlocs)): if len(tlocs[il]) == 0: tlocs[il] = \"--\" sta.location = tlocs # Update", "--bp should contain 2 \" + \"comma-separated floats\") ## JMG ## if args.slowbound", "help=\"Specify the corner frequencies for the bandpass filter. 
\" + \"[Default 0.05,0.5]\") PreGroup.add_argument(", "if len(args.stkeys) > 0: args.stkeys = args.stkeys.split(',') # construct start time if len(args.startT)", "parser.error(\"Input file \" + args.indb + \" does not exist\") # create station", "parser.add_argument_group( title='Pre-processing Settings', description=\"Options for pre-processing of receiver function \" + \"data prior", "[0.05, 0.5] else: args.bp = [float(val) for val in args.bp.split(',')] args.bp = sorted(args.bp)", "''' # Check bounds on data # if meta.slow < args.slowbound[0] and meta.slow", "parser.error( \"Error: choose between 'P', 'PP', 'allP', 'S', 'SKS' and 'allS'.\") if args.phase", "print locations tlocs = sta.location if len(tlocs) == 0: tlocs = [''] for", "Permission is hereby granted, free of charge, to any person obtaining a copy", "sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check for folder already processed if stfld in procfold:", "the stacks [Default \" + \"does not produce plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\",", "args.dip is None: args.calc_dip = False args.nbaz = None elif args.strike is None", "|\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "Software without restriction, including without limitation the rights # to use, copy, modify,", "= [20., 50.] 
else: args.hbound = [float(val) for val in args.hbound.split(',')] args.hbound =", "[0.5, 2., -1.]]\") HKGroup.add_argument( \"--type\", action=\"store\", type=str, dest=\"typ\", default=\"sum\", help=\"Specify type of final", "for folder in datafiles: # Skip hidden folders if folder.name.startswith('.'): continue date =", "use a copy of the radial component \" + \"filtered at different corners", "this option to use a copy of the radial component \" + \"filtered", "stations in the database]\") parser.add_argument( \"-v\", \"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\", default=False, help=\"Specify to", "# The above copyright notice and this permission notice shall be included in", "\"--nbaz\", action=\"store\", dest=\"nbaz\", type=int, default=36, help=\"Specify integer number of back-azimuth bins to consider.", "# of this software and associated documentation files (the \"Software\"), to deal #", "OTHER DEALINGS IN THE # SOFTWARE. # Import modules and functions import numpy", "\" + \"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\", type=int, default=36, help=\"Specify integer number", "binning, plotting, HkStack from pathlib import Path from argparse import ArgumentParser from os.path", "+ \"[Default doesn't save]\") # Constants Settings ModelGroup = parser.add_argument_group( title='Model Settings', description=\"Miscellaneous", "Options are: 'sum' for \" + \"a weighted average (using weights), or 'product'", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE.", "substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\",", "not in list of phases if meta.phase not in args.listphase: continue # QC", "meta.snrh < args.snrh: continue if meta.snr < args.snr: continue if meta.cc < args.cc:", "args.kbound hkstack.dh = args.dh hkstack.dk = args.dk hkstack.weights = args.weights # Stack with", "import Path from argparse import ArgumentParser from os.path import exists as exist from", "(s/km). [Default [0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\", action=\"store\", dest=\"bazbound\", type=str, default=None, help=\"Specify a list", "THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # Import modules and", "RF bins: \" + str(len(rfRstream))) print('') # Filter original stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1],", "time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"|-----------------------------------------------|\") # Check for folder already processed if", "restriction, including without limitation the rights # to use, copy, modify, merge, publish,", "option to save the plot [Default doesn't save]\") PlotGroup.add_argument( \"--title\", action=\"store\", type=str, dest=\"title\",", "description=\"Script used to process receiver function data \" + \"for H-k stacking.\") #", "pre-processing of receiver function \" + \"data prior to H-k stacking\") PreGroup.add_argument( \"--binlim\",", "help=\"Specify integer number of back-azimuth bins to consider. \" + \"[Default 36]\") PreGroup.add_argument(", "type=float, dest=\"snr\", default=-9999., help=\"Specify the SNR threshold for extracting receiver functions. \" +", "hkstack = HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike, dip=args.dip, vp=args.vp) except: hkstack = HkStack(rfRstream, strike=args.strike, dip=args.dip,", "Criteria TimeGroup = parser.add_argument_group( title=\"Time Settings\", description=\"Settings associated with refining \" + \"the", "\" + \"contained within the station database. 
Partial keys will \" + \"be", "_| |_) | |_| | | | | | < #\") print(\"# |_|", "args.slowbound[0] and meta.slow > args.slowbound[1]: # continue # if meta.baz < args.bazbound[0] and", "stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2, zerophase=True) # Initialize the HkStack object try: hkstack", "import binning, plotting, HkStack from pathlib import Path from argparse import ArgumentParser from", "\"--\" sta.location = tlocs # Update Display print(\" \") print(\" \") print(\"|===============================================|\") print(\"|===============================================|\")", "'SKS', 'allS']: parser.error( \"Error: choose between 'P', 'PP', 'allP', 'S', 'SKS' and 'allS'.\")", "sta = db[stkey] # Construct Folder Name stfld = stkey if not args.lkey:", "on Vp/Vs (k). [Default [1.56, 2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\", type=float, dest=\"dk\", default=0.02, help=\"Specify", "None: args.bp = [0.05, 0.5] else: args.bp = [float(val) for val in args.bp.split(',')]", "stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\", default=False, help=\"Set this option to produce a plot", "default=\"\", help=\"Specify a UTCDateTime compatible string representing \" + \"the end time for", "the search. This will override any \" + \"station end times [Default end", "\" + \" [Default False]\") PreGroup.add_argument( \"--phase\", action=\"store\", type=str, dest=\"phase\", default='allP', help=\"Specify the", "description=\"Specify parameters for plotting the H-k stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\", default=False, help=\"Set", "= np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR > 2.5] for i in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('')", "0: continue if args.no_outl: t1 = 0. t2 = 30. 
varR = []", "= date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day) if dateUTC > tstart and dateUTC < tend:", "[0.04, 0.08] else: args.slowbound = [float(val) for val in args.slowbound.split(',')] args.slowbound = sorted(args.slowbound)", "!= 2: parser.error( \"Error: --bazbound should contain 2 \" + \"comma-separated floats\") ##", "default=40, help=\"Specify integer number of slowness bins to consider. \" + \"[Default 40]\")", "Path('HK_DATA') / stfld if not savepath.is_dir(): print('Path to '+str(savepath)+' doesn`t exist - creating", "in args.listphase: continue # QC Thresholding if meta.snrh < args.snrh: continue if meta.snr", "or 'product' for the product \" + \"of positive values in stacks. [Default", "not metafile.is_file(): continue meta = pickle.load(open(metafile, 'rb')) # Skip data not in list", "= rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True) # Check bin counts: for tr", "parser.error( \"Error: --bp should contain 2 \" + \"comma-separated floats\") ## JMG ##", "|\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s} |\".format( sta.network, sta.station)) print(\"| Channel: {0:2s};", "\"--bazbound\", action=\"store\", dest=\"bazbound\", type=str, default=None, help=\"Specify a list of two floats with minimum", "Run Input Parser args = get_hk_arguments() # Load Database db, stkeys = stdb.io.load_db(fname=args.indb,", "format of figure. 
Can be any one of the valid\" + \"matplotlib formats:", "args.startT = None # construct end time if len(args.endT) > 0: try: args.endT", "PreGroup.add_argument( \"--bazbound\", action=\"store\", dest=\"bazbound\", type=str, default=None, help=\"Specify a list of two floats with", "description=\"Options for pre-processing of receiver function \" + \"data prior to H-k stacking\")", "360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\", default=False, help=\"Set this option to use phase-weighted stacking", "+ \"be used to match against those in the dictionary. For \" +", "except: parser.error( \"Cannot construct UTCDateTime from end time: \" + args.endT) else: args.endT", "args.kbound = sorted(args.kbound) if (len(args.kbound)) != 2: parser.error( \"Error: --kbound should contain 2", "radial RF data: \" + str(len(rfRstream))) print('') # Try binning if specified if", "three floats with for Ps, Pps and Pass \" + \"weights in final", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "Extract station information from dictionary sta = db[stkey] # Construct Folder Name stfld", "args.snrh: continue if meta.snr < args.snr: continue if meta.cc < args.cc: continue '''", "binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] else: rf_tmp = binning.bin(rfRstream, typ='slow', nbin=args.nslow+1,", "dictionary. For \" + \"instance, providing IU will match with all stations in", "used to match against those in the dictionary. For \" + \"instance, providing", "not in ['sum', 'product']: parser.error( \"Error: choose between 'sum' and 'product'\") if args.copy:", "HkStack object to file. \" + \"[Default doesn't save]\") # Constants Settings ModelGroup", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "\"the start time for the search. This will override any \" + \"station", "search. 
This will override any \" + \"station end times [Default end date", "np.arange(len(rfRstream))[robustR > 2.5] for i in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number of radial RF", "[float(val) for val in args.kbound.split(',')] args.kbound = sorted(args.kbound) if (len(args.kbound)) != 2: parser.error(", "contain 2 \" + \"comma-separated floats\") ## JMG ## if args.phase not in", "\\| | | | | '_ \\| |/ / #\") print(\"# | |", "construct UTCDateTime from end time: \" + args.endT) else: args.endT = None if", "= sorted(args.bp_copy) if (len(args.bp_copy)) != 2: parser.error( \"Error: --bp_copy should contain 2 \"", "are 'P', 'PP', 'allP', 'S', 'SKS' or 'allS'. \" + \"[Default 'allP']\") PreGroup.add_argument(", "else: args.listphase = [args.phase] if args.typ not in ['sum', 'product']: parser.error( \"Error: choose", "station information from dictionary sta = db[stkey] # Construct Folder Name stfld =", "associated documentation files (the \"Software\"), to deal # in the Software without restriction,", "= [0.04, 0.08] else: args.slowbound = [float(val) for val in args.slowbound.split(',')] args.slowbound =", "# Update Display print(\" \") print(\" \") print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s} |\".format( sta.station))", "RF data: \" + str(len(rfRstream))) print('') # Try binning if specified if args.calc_dip:", "of this software and associated documentation files (the \"Software\"), to deal # in", "pre-existing data. 
\" + \"[Default False]\") parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\", default=False, help=\"Force", "refining \" + \"the times to include in searching for receiver function data\")", "/ \"Meta_Data.pkl\" if not metafile.is_file(): continue meta = pickle.load(open(metafile, 'rb')) # Skip data", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "savepath = Path('HK_DATA') / stfld if not savepath.is_dir(): print('Path to '+str(savepath)+' doesn`t exist", "args.stkeys.split(',') # construct start time if len(args.startT) > 0: try: args.startT = UTCDateTime(args.startT)", "the Pps and Pss phases. \" + \"[Default False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\",", "args.save: filename = savepath / (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update processed folders", "<station database>\", description=\"Script used to process receiver function data \" + \"for H-k", "args.calc_dip = True if args.bp is None: args.bp = [0.05, 0.5] else: args.bp", "args.nbaz = None elif args.strike is None or args.dip is None: parser.error(\"Specify both", "> args.slowbound[1]: # continue # if meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]:", "dateUTC = UTCDateTime(year+'-'+month+'-'+day) if dateUTC > tstart and dateUTC < tend: # Load", "for k. [Default 0.02]\") HKGroup.add_argument( \"--weights\", action=\"store\", type=str, dest=\"weights\", default=None, help=\"Specify a list", "Moho. 
[Default None]\") PlotGroup = parser.add_argument_group( title='Settings for plotting results', description=\"Specify parameters for", "and dip for this type \" + \"of analysis\") else: args.calc_dip = True", "= (taxis > t1) & (taxis < t2) varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR) #", "\"[Default False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str, default=None, help=\"Specify a list of two", "override any \" + \"station start times. [Default start date of station]\") TimeGroup.add_argument(", "all stations in \" + \"the IU network [Default processes all stations in", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of", "else: args.weights = [float(val) for val in args.weights.split(',')] if (len(args.weights)) != 3: parser.error(", "in range(len(rfRstream)): taxis = rfRstream[i].stats.taxis tselect = (taxis > t1) & (taxis <", "corners=2, zerophase=True) # Check bin counts: for tr in rfRstream: if (tr.stats.nbin <", "used for data processing on-the-fly (requires web connection) \"\"\" parser = ArgumentParser( usage=\"%(prog)s", "= [float(val) for val in args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy) if (len(args.bp_copy)) != 2:", "stacks hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot, args.title, args.form) if args.save: filename = savepath /", "hkstack.average(typ=args.typ) if args.plot: hkstack.plot(args.save_plot, args.title, args.form) if args.save: filename = savepath / (hkstack.rfV1[0].stats.station", "'_ \\| | | | | '_ \\| |/ / #\") print(\"# |", "integer number of slowness bins to consider. \" + \"[Default 40]\") PreGroup.add_argument( \"--snr\",", "'product' for the product \" + \"of positive values in stacks. 
[Default 'sum']\")", "title=\"Time Settings\", description=\"Settings associated with refining \" + \"the times to include in", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "in list of phases if meta.phase not in args.listphase: continue # QC Thresholding", "# Check bounds on data # if meta.slow < args.slowbound[0] and meta.slow >", "default=False, help=\"Set this option to produce a plot of the stacks [Default \"", "maximum\" + \"bounds on Vp/Vs (k). [Default [1.56, 2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\", type=float,", "the Software without restriction, including without limitation the rights # to use, copy,", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "args.slowbound = sorted(args.slowbound) if (len(args.slowbound)) != 2: parser.error( \"Error: --slowbound should contain 2", "\"Cannot construct UTCDateTime from end time: \" + args.endT) else: args.endT = None", "person obtaining a copy # of this software and associated documentation files (the", "args.bp_copy = [float(val) for val in args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy) if (len(args.bp_copy)) !=", "\"bounds on Moho depth (H, in km). 
[Default [20., 50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\",", "Station: {0:>2s}.{1:5s} |\".format( sta.network, sta.station)) print(\"| Channel: {0:2s}; Locations: {1:15s} |\".format( sta.channel, \",\".join(tlocs)))", "radial component and filter if args.copy: rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2,", "[1.56, 2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\", type=float, dest=\"dk\", default=0.02, help=\"Specify search interval for k.", "range(0, len(tlocs)): if len(tlocs[il]) == 0: tlocs[il] = \"--\" sta.location = tlocs #", "0.5] else: args.bp = [float(val) for val in args.bp.split(',')] args.bp = sorted(args.bp) if", "type=int, default=36, help=\"Specify integer number of back-azimuth bins to consider. \" + \"[Default", "filter if args.copy: rfRstream_copy = rfRstream.copy() rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True) # Check", "|/ / #\") print(\"# | | | _| |_) | |_| | |", "average (using weights), or 'product' for the product \" + \"of positive values", "action=\"store\", dest=\"bazbound\", type=str, default=None, help=\"Specify a list of two floats with minimum and", "type=str, dest=\"phase\", default='allP', help=\"Specify the phase name to plot. \" + \"Options are", "args.bazbound is None: args.bazbound = [0.0, 360.0] else: args.bazbound = [float(val) for val", "x.is_dir()] for folder in datafiles: # Skip hidden folders if folder.name.startswith('.'): continue date", "dest=\"bp\", default=None, help=\"Specify the corner frequencies for the bandpass filter. 
\" + \"[Default", "# Skip hidden folders if folder.name.startswith('.'): continue date = folder.name.split('_')[0] year = date[0:4]", "above copyright notice and this permission notice shall be included in # all", "hkstack.kbound = args.kbound hkstack.dh = args.dh hkstack.dk = args.dk hkstack.weights = args.weights #", "[Default None]\") PlotGroup = parser.add_argument_group( title='Settings for plotting results', description=\"Specify parameters for plotting", "help=\"Specify integer number of slowness bins to consider. \" + \"[Default 40]\") PreGroup.add_argument(", "+ \"bounds on Moho depth (H, in km). [Default [20., 50.]]\") HKGroup.add_argument( \"--dh\",", "# continue # if meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]: # continue", "in km). [Default [20., 50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\", type=float, dest=\"dh\", default=0.5, help=\"Specify search", "Path from argparse import ArgumentParser from os.path import exists as exist from numpy", "dest=\"form\", default=\"png\", help=\"Specify format of figure. Can be any one of the valid\"", "default=-9999., help=\"Specify the SNR threshold for extracting receiver functions. \" + \"[Default None]\")", "persons to whom the Software is # furnished to do so, subject to", "no title]\") PlotGroup.add_argument( \"--format\", action=\"store\", type=str, dest=\"form\", default=\"png\", help=\"Specify format of figure. Can", "!= 2: parser.error( \"Error: --bp should contain 2 \" + \"comma-separated floats\") ##", "conditions: # # The above copyright notice and this permission notice shall be", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "False]\") parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\", default=False, help=\"Force folder names to use long-key", "type=str, dest=\"bp\", default=None, help=\"Specify the corner frequencies for the bandpass filter. \" +", "frequencies for the bandpass filter. 
\" + \"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\",", "already processed...skipping '.format(stfld)) continue rfRstream = Stream() datafiles = [x for x in", "args.slowbound[1]: # continue # if meta.baz < args.bazbound[0] and meta.baz > args.bazbound[1]: #", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "documentation files (the \"Software\"), to deal # in the Software without restriction, including", "UTCDateTime from start time: \" + args.startT) else: args.startT = None # construct", "[args.phase] if args.typ not in ['sum', 'product']: parser.error( \"Error: choose between 'sum' and", "or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS", "folders procfold = [] # Loop over station keys for stkey in list(stkeys):", "floats with minimum and maximum\" + \"bounds on Vp/Vs (k). [Default [1.56, 2.1]]\")", "print(\"| Start time: {0:19s} |\".format( sta.startdate.strftime(\"%Y-%m-%d %H:%M:%S\"))) print(\"| End time: {0:19s} |\".format( sta.enddate.strftime(\"%Y-%m-%d", "data not in list of phases if meta.phase not in args.listphase: continue #", "notice and this permission notice shall be included in # all copies or", "of charge, to any person obtaining a copy # of this software and", "elif args.phase in ['S', 'SKS', 'allS']: datapath = Path('S_DATA') / stfld if not", "+ \"frequency for the copies stream (Hz). [Default [0.05, 0.35]]\") HKGroup = parser.add_argument_group(", "stdb from obspy.clients.fdsn import Client from obspy.core import Stream, UTCDateTime from rfpy import", "+ \"which to perform the analysis. These must be \" + \"contained within", "\" + \"data prior to H-k stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\", type=float, dest=\"binlim\", default=1,", "# Load meta data metafile = folder / \"Meta_Data.pkl\" if not metafile.is_file(): continue", "of pre-existing data. 
\" + \"[Default False]\") parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\", default=False,", "= sta.location if len(tlocs) == 0: tlocs = [''] for il in range(0,", "help=\"Specify the SNR threshold for extracting receiver functions. \" + \"[Default None]\") PreGroup.add_argument(", "args.weights is None: args.weights = [0.5, 2.0, -1.0] else: args.weights = [float(val) for", "is None: args.bp = [0.05, 0.5] else: args.bp = [float(val) for val in", "and settings\") ModelGroup.add_argument( \"--vp\", action=\"store\", type=float, dest=\"vp\", default=6.0, help=\"Specify mean crustal Vp (km/s).", "list of two floats with minimum and maximum\" + \"bounds on back azimuth", "\"--verbose\", action=\"store_true\", dest=\"verb\", default=False, help=\"Specify to increase verbosity.\") parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\",", "stfld elif args.phase in ['S', 'SKS', 'allS']: datapath = Path('S_DATA') / stfld if", "parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\", default=False, help=\"Force folder names to use long-key form", "included in # all copies or substantial portions of the Software. # #", "help=\"Force the overwriting of pre-existing data. \" + \"[Default False]\") parser.add_argument( \"-L\", \"--long-name\",", "|_| |_| | .__/ \\__, |___|_| |_|_|\\_\\ #\") print(\"# |_| |___/_____| #\") print(\"#", "args.bp = sorted(args.bp) if (len(args.bp)) != 2: parser.error( \"Error: --bp should contain 2", "the Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "contain 2 \" + \"comma-separated floats\") if args.bazbound is None: args.bazbound = [0.0,", "!= 2: parser.error( \"Error: --hbound should contain 2 \" + \"comma-separated floats\") if", "[x for x in datapath.iterdir() if x.is_dir()] for folder in datafiles: # Skip", "and associated documentation files (the \"Software\"), to deal # in the Software without", "bins: \" + str(len(rfRstream))) print('') # Filter original stream rfRstream.filter('bandpass', freqmin=args.bp[0], freqmax=args.bp[1], corners=2,", "savepath.mkdir(parents=True) # Get search start time if args.startT is None: tstart = sta.startdate", "OR OTHER DEALINGS IN THE # SOFTWARE. # Import modules and functions import", "36]\") PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\", type=int, default=40, help=\"Specify integer number of slowness bins", "HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp) # Update attributes hkstack.hbound = args.hbound hkstack.kbound = args.kbound", "of dipping Moho. [Default None]\") PlotGroup = parser.add_argument_group( title='Settings for plotting results', description=\"Specify", "rfdata = pickle.load(file) rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts != 1451: print(folder) if len(rfRstream) ==", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "from end time: \" + args.endT) else: args.endT = None if args.strike is", "PreGroup.add_argument( \"--pws\", action=\"store_true\", dest=\"pws\", default=False, help=\"Set this option to use phase-weighted stacking during", "action=\"store_true\", dest=\"save\", default=False, help=\"Set this option to save the HkStack object to file.", "representing \" + \"the start time for the search. 
This will override any", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "\"[Default None]\") PreGroup.add_argument( \"--snrh\", action=\"store\", type=float, dest=\"snrh\", default=-9999, help=\"Specify the horizontal component SNR", "PlotGroup = parser.add_argument_group( title='Settings for plotting results', description=\"Specify parameters for plotting the H-k", "range medvarR = np.median(varR) madvarR = 1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR", "type=str) parser.add_argument( \"--keys\", action=\"store\", type=str, dest=\"stkeys\", default=\"\", help=\"Specify a comma separated list of", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "for val in args.bp_copy.split(',')] args.bp_copy = sorted(args.bp_copy) if (len(args.bp_copy)) != 2: parser.error( \"Error:", "db[stkey] # Construct Folder Name stfld = stkey if not args.lkey: stfld =", "each bin. [Default 3]\") PreGroup.add_argument( \"--bp\", action=\"store\", type=str, dest=\"bp\", default=None, help=\"Specify the corner", "\"Error: choose between 'sum' and 'product'\") if args.copy: if args.bp_copy is None: args.bp_copy", "network [Default processes all stations in the database]\") parser.add_argument( \"-v\", \"-V\", \"--verbose\", action=\"store_true\",", "/ #\") print(\"# | | | _| |_) | |_| | | |", "else: args.startT = None # construct end time if len(args.endT) > 0: try:", "= Stream() datafiles = [x for x in datapath.iterdir() if x.is_dir()] for folder", "if not metafile.is_file(): continue meta = pickle.load(open(metafile, 'rb')) # Skip data not in", "modules and functions import numpy as np import pickle import stdb from obspy.clients.fdsn", "any \" + \"station start times. 
[Default start date of station]\") TimeGroup.add_argument( \"--end\",", "= sorted(args.bazbound) if (len(args.bazbound)) != 2: parser.error( \"Error: --bazbound should contain 2 \"", "action=\"store\", dest=\"bp_copy\", type=str, default=None, help=\"Specify a list of two floats with minimum and", "continue if args.save_plot and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of radial RF bins:", "if args.bazbound is None: args.bazbound = [0.0, 360.0] else: args.bazbound = [float(val) for", "Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of radial RF bins: \" + str(len(rfRstream))) print('') #", "stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track processed folders procfold = [] # Loop over station", "of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "search. This will override any \" + \"station start times. [Default start date", "verbosity.\") parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False, help=\"Force the overwriting of pre-existing data.", "= parser.add_argument_group( title='Model Settings', description=\"Miscellaneous default values and settings\") ModelGroup.add_argument( \"--vp\", action=\"store\", type=float,", "if args.hbound is None: args.hbound = [20., 50.] else: args.hbound = [float(val) for", "+ \"comma-separated floats\") if args.hbound is None: args.hbound = [20., 50.] else: args.hbound", "of RfPy. 
# # Permission is hereby granted, free of charge, to any", "[Default start date of station]\") TimeGroup.add_argument( \"--end\", action=\"store\", type=str, dest=\"endT\", default=\"\", help=\"Specify a", "\"[Default 40]\") PreGroup.add_argument( \"--snr\", action=\"store\", type=float, dest=\"snr\", default=-9999., help=\"Specify the SNR threshold for", "args.startT) else: args.startT = None # construct end time if len(args.endT) > 0:", "with all stations in \" + \"the IU network [Default processes all stations", "\"[Default doesn't save]\") # Constants Settings ModelGroup = parser.add_argument_group( title='Model Settings', description=\"Miscellaneous default", "# Define save path if args.save: savepath = Path('HK_DATA') / stfld if not", "bins to consider. \" + \"[Default 40]\") PreGroup.add_argument( \"--snr\", action=\"store\", type=float, dest=\"snr\", default=-9999.,", "\" + \"comma-separated floats\") if args.hbound is None: args.hbound = [20., 50.] else:", "parser.error( \"Error: choose between 'sum' and 'product'\") if args.copy: if args.bp_copy is None:", "IU will match with all stations in \" + \"the IU network [Default", "copy of the radial component \" + \"filtered at different corners for the", "+ \"a weighted average (using weights), or 'product' for the product \" +", "\" + \"comma-separated floats\") if args.bazbound is None: args.bazbound = [0.0, 360.0] else:", "Selection Criteria TimeGroup = parser.add_argument_group( title=\"Time Settings\", description=\"Settings associated with refining \" +", "Update attributes hkstack.hbound = args.hbound hkstack.kbound = args.kbound hkstack.dh = args.dh hkstack.dk =", "to delete outliers based on the MAD \" + \"on the variance. [Default", "type=int, default=40, help=\"Specify integer number of slowness bins to consider. 
\" + \"[Default", "parser.add_argument( \"--keys\", action=\"store\", type=str, dest=\"stkeys\", default=\"\", help=\"Specify a comma separated list of station", "processed folders procfold = [] # Loop over station keys for stkey in", "folder / \"Meta_Data.pkl\" if not metafile.is_file(): continue meta = pickle.load(open(metafile, 'rb')) # Skip", "'eps', 'pdf'. [Default 'png']\") args = parser.parse_args(argv) # Check inputs if not exist(args.indb):", "args.kbound.split(',')] args.kbound = sorted(args.kbound) if (len(args.kbound)) != 2: parser.error( \"Error: --kbound should contain", "OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # Import modules", "Get search end time if args.endT is None: tend = sta.enddate else: tend", "increase verbosity.\") parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False, help=\"Force the overwriting of pre-existing", "Settings', description=\"Options for pre-processing of receiver function \" + \"data prior to H-k", "datapath = Path('P_DATA') / stfld elif args.phase in ['S', 'SKS', 'allS']: datapath =", "\"Error: --kbound should contain 2 \" + \"comma-separated floats\") if args.weights is None:", "with minimum and maximum\" + \"bounds on Moho depth (H, in km). 
[Default", "date of station]\") PreGroup = parser.add_argument_group( title='Pre-processing Settings', description=\"Options for pre-processing of receiver", "args.endT = UTCDateTime(args.endT) except: parser.error( \"Cannot construct UTCDateTime from end time: \" +", "args.bazbound = sorted(args.bazbound) if (len(args.bazbound)) != 2: parser.error( \"Error: --bazbound should contain 2", "np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR > 2.5] for i in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number", "= args.endT if tstart > sta.enddate or tend < sta.startdate: continue # Temporary", "database]\") parser.add_argument( \"-v\", \"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\", default=False, help=\"Specify to increase verbosity.\") parser.add_argument(", "= db[stkey] # Construct Folder Name stfld = stkey if not args.lkey: stfld", "folder.name.split('_')[0] year = date[0:4] month = date[4:6] day = date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day)", "of station keys for \" + \"which to perform the analysis. These must", "nslow=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] else: rf_tmp = binning.bin(rfRstream, typ='slow', nbin=args.nslow+1, pws=args.pws) rfRstream", "# Load Database db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track processed folders procfold", "slowness bins to consider. \" + \"[Default 40]\") PreGroup.add_argument( \"--snr\", action=\"store\", type=float, dest=\"snr\",", "doesn't save]\") # Constants Settings ModelGroup = parser.add_argument_group( title='Model Settings', description=\"Miscellaneous default values", "parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False, help=\"Force the overwriting of pre-existing data. \"", "SNR threshold for extracting receiver functions. 
\" + \"[Default None]\") PreGroup.add_argument( \"--snrh\", action=\"store\",", "from pathlib import Path from argparse import ArgumentParser from os.path import exists as", "(len(args.bp_copy)) != 2: parser.error( \"Error: --bp_copy should contain 2 \" + \"comma-separated floats\")", "choose between 'sum' and 'product'\") if args.copy: if args.bp_copy is None: args.bp_copy =", "in # all copies or substantial portions of the Software. # # THE", "minimum and maximum\" + \"frequency for the copies stream (Hz). [Default [0.05, 0.35]]\")", "40]\") PreGroup.add_argument( \"--snr\", action=\"store\", type=float, dest=\"snr\", default=-9999., help=\"Specify the SNR threshold for extracting", "t1) & (taxis < t2) varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR) # Remove outliers wrt", "= parser.add_argument_group( title='Settings for H-k Stacking', description=\"Specify parameters of H-k search, including\" +", "stream (Hz). [Default [0.05, 0.35]]\") HKGroup = parser.add_argument_group( title='Settings for H-k Stacking', description=\"Specify", "help=\"Specify to increase verbosity.\") parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\", dest=\"ovr\", default=False, help=\"Force the overwriting", "freqmax=args.bp_copy[1], corners=2, zerophase=True) # Check bin counts: for tr in rfRstream: if (tr.stats.nbin", "print(\"# | | | _| |_) | |_| | | | | |", "\"names, regardless of the key type of the database.\" ) # Event Selection", "+ \"the end time for the search. This will override any \" +", "if stfld in procfold: print(' {0} already processed...skipping '.format(stfld)) continue rfRstream = Stream()", "data # if meta.slow < args.slowbound[0] and meta.slow > args.slowbound[1]: # continue #", "if stream is too short if len(rfRstream) < 5: continue if args.save_plot and", "continue # Define save path if args.save: savepath = Path('HK_DATA') / stfld if", "help=\"Set this option to save the HkStack object to file. 
\" + \"[Default", "if args.typ not in ['sum', 'product']: parser.error( \"Error: choose between 'sum' and 'product'\")", "in list(stkeys): # Extract station information from dictionary sta = db[stkey] # Construct", "2: parser.error( \"Error: --bp should contain 2 \" + \"comma-separated floats\") ## JMG", "\" + \"the IU network [Default processes all stations in the database]\") parser.add_argument(", "= HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike, dip=args.dip, vp=args.vp) except: hkstack = HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp)", "free of charge, to any person obtaining a copy # of this software", "in ['sum', 'product']: parser.error( \"Error: choose between 'sum' and 'product'\") if args.copy: if", "# Define path to see if it exists if args.phase in ['P', 'PP',", "PreGroup.add_argument( \"--bp\", action=\"store\", type=str, dest=\"bp\", default=None, help=\"Specify the corner frequencies for the bandpass", "exist from numpy import nan def get_hk_arguments(argv=None): \"\"\" Get Options from :class:`~optparse.OptionParser` objects.", "\"\"\" Get Options from :class:`~optparse.OptionParser` objects. 
This function is used for data processing", "stfld if not datapath.is_dir(): print('Path to ' + str(datapath) + ' doesn`t exist", "args.listphase = ['P', 'PP'] elif args.phase == 'allS': args.listphase = ['S', 'SKS'] else:", "too short if len(rfRstream) < 5: continue if args.save_plot and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True)", "!= 2: parser.error( \"Error: --slowbound should contain 2 \" + \"comma-separated floats\") if", "print(\"#########################################\") print() # Run Input Parser args = get_hk_arguments() # Load Database db,", "if args.save: filename = savepath / (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update processed", "PreGroup = parser.add_argument_group( title='Pre-processing Settings', description=\"Options for pre-processing of receiver function \" +", "Get a copy of the radial component and filter if args.copy: rfRstream_copy =", "| | < #\") print(\"# |_| |_| | .__/ \\__, |___|_| |_|_|\\_\\ #\")", "the plot [Default doesn't save]\") PlotGroup.add_argument( \"--title\", action=\"store\", type=str, dest=\"title\", default=\"\", help=\"Specify plot", "\" + \"the times to include in searching for receiver function data\") TimeGroup.add_argument(", "open(filename, \"rb\") rfdata = pickle.load(file) rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts != 1451: print(folder) if", "metafile.is_file(): continue meta = pickle.load(open(metafile, 'rb')) # Skip data not in list of", "rfRstream[i].stats.taxis tselect = (taxis > t1) & (taxis < t2) varR.append(np.var(rfRstream[i].data[tselect])) varR =", "Load meta data metafile = folder / \"Meta_Data.pkl\" if not metafile.is_file(): continue meta", "\" + \"be used to match against those in the dictionary. For \"", "\" + \"the end time for the search. 
This will override any \"", "if args.phase in ['P', 'PP', 'allP']: datapath = Path('P_DATA') / stfld elif args.phase", "args.bazbound.split(',')] args.bazbound = sorted(args.bazbound) if (len(args.bazbound)) != 2: parser.error( \"Error: --bazbound should contain", "print() print(\"#########################################\") print(\"# __ _ _ #\") print(\"# _ __ / _|_ __", "action=\"store_true\", dest=\"pws\", default=False, help=\"Set this option to use phase-weighted stacking during binning \"", "on slowness (s/km). [Default [0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\", action=\"store\", dest=\"bazbound\", type=str, default=None, help=\"Specify", "stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path to see if it exists if args.phase in ['P',", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "pws=args.pws) rfRstream = rf_tmp[0] # Get a copy of the radial component and", "args.bp = [float(val) for val in args.bp.split(',')] args.bp = sorted(args.bp) if (len(args.bp)) !=", "option to use a copy of the radial component \" + \"filtered at", "with minimum and maximum\" + \"bounds on slowness (s/km). [Default [0.04, 0.08]]\") PreGroup.add_argument(", "[arguments] <station database>\", description=\"Script used to process receiver function data \" + \"for", "\"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\", default=False, help=\"Force folder names to use long-key form (NET.STN.CHN).", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "SOFTWARE. 
# Import modules and functions import numpy as np import pickle import", "0: args.stkeys = args.stkeys.split(',') # construct start time if len(args.startT) > 0: try:", "= None # construct end time if len(args.endT) > 0: try: args.endT =", "if it exists if args.phase in ['P', 'PP', 'allP']: datapath = Path('P_DATA') /", "to process from.\", type=str) parser.add_argument( \"--keys\", action=\"store\", type=str, dest=\"stkeys\", default=\"\", help=\"Specify a comma", "PreGroup.add_argument( \"--copy\", action=\"store_true\", dest=\"copy\", default=False, help=\"Set this option to use a copy of", "Check bin counts: for tr in rfRstream: if (tr.stats.nbin < args.binlim): rfRstream.remove(tr) #", "in range(0, len(tlocs)): if len(tlocs[il]) == 0: tlocs[il] = \"--\" sta.location = tlocs", "Software, and to permit persons to whom the Software is # furnished to", "t2 = 30. varR = [] for i in range(len(rfRstream)): taxis = rfRstream[i].stats.taxis", "+ \"[Default 40]\") PreGroup.add_argument( \"--snr\", action=\"store\", type=float, dest=\"snr\", default=-9999., help=\"Specify the SNR threshold", "save]\") # Constants Settings ModelGroup = parser.add_argument_group( title='Model Settings', description=\"Miscellaneous default values and", "product \" + \"of positive values in stacks. [Default 'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\",", "formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']\") args = parser.parse_args(argv) # Check inputs", "args.hbound.split(',')] args.hbound = sorted(args.hbound) if (len(args.hbound)) != 2: parser.error( \"Error: --hbound should contain", "[Default 'png']\") args = parser.parse_args(argv) # Check inputs if not exist(args.indb): parser.error(\"Input file", "for the search. This will override any \" + \"station start times. 
[Default", "action=\"store\", type=str, dest=\"hbound\", default=None, help=\"Specify a list of two floats with minimum and", "help=\"Set this option to use phase-weighted stacking during binning \" + \" [Default", "plotting results', description=\"Specify parameters for plotting the H-k stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\",", "doesn't save]\") PlotGroup.add_argument( \"--title\", action=\"store\", type=str, dest=\"title\", default=\"\", help=\"Specify plot title [Default has", "over station keys for stkey in list(stkeys): # Extract station information from dictionary", "type=str, dest=\"kbound\", default=None, help=\"Specify a list of two floats with minimum and maximum\"", "Can be any one of the valid\" + \"matplotlib formats: 'png', 'jpg', 'eps',", "= stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path to see if it exists if args.phase in", "\"\"\" parser = ArgumentParser( usage=\"%(prog)s [arguments] <station database>\", description=\"Script used to process receiver", "continue rfRstream = Stream() datafiles = [x for x in datapath.iterdir() if x.is_dir()]", "include in searching for receiver function data\") TimeGroup.add_argument( \"--start\", action=\"store\", type=str, dest=\"startT\", default=\"\",", "dest=\"cc\", default=-1., help=\"Specify the CC threshold for extracting receiver functions. 
\" + \"[Default", "x in datapath.iterdir() if x.is_dir()] for folder in datafiles: # Skip hidden folders", "for plotting results', description=\"Specify parameters for plotting the H-k stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\",", "to H-k stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\", type=float, dest=\"binlim\", default=1, help=\"Specify the minimum number", "the database.\" ) # Event Selection Criteria TimeGroup = parser.add_argument_group( title=\"Time Settings\", description=\"Settings", "\" + \"[Default None]\") PreGroup.add_argument( \"--snrh\", action=\"store\", type=float, dest=\"snrh\", default=-9999, help=\"Specify the horizontal", "elif args.strike is None or args.dip is None: parser.error(\"Specify both strike and dip", "from rfpy import binning, plotting, HkStack from pathlib import Path from argparse import", "end time if len(args.endT) > 0: try: args.endT = UTCDateTime(args.endT) except: parser.error( \"Cannot", "= open(filename, \"rb\") rfdata = pickle.load(file) rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts != 1451: print(folder)", "any one of the valid\" + \"matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. 
[Default", "in ['S', 'SKS', 'allS']: datapath = Path('S_DATA') / stfld if not datapath.is_dir(): print('Path", "args.dh hkstack.dk = args.dk hkstack.weights = args.weights # Stack with or without dip", "\".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update processed folders procfold.append(stfld) if __name__ == \"__main__\": # Run", "if args.save_plot and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of radial RF bins: \"", "file \" + args.indb + \" does not exist\") # create station key", "/ (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update processed folders procfold.append(stfld) if __name__ ==", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "{0:>8s} |\".format( sta.station)) print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s} |\".format( sta.network, sta.station)) print(\"| Channel:", "option to produce a plot of the stacks [Default \" + \"does not", "of two floats with minimum and maximum\" + \"bounds on slowness (s/km). [Default", "|___|_| |_|_|\\_\\ #\") print(\"# |_| |___/_____| #\") print(\"# #\") print(\"#########################################\") print() # Run", "help=\"Specify plot title [Default has no title]\") PlotGroup.add_argument( \"--format\", action=\"store\", type=str, dest=\"form\", default=\"png\",", "Continue if stream is too short if len(rfRstream) < 5: continue if args.save_plot", "threshold for extracting receiver functions. 
\" + \"[Default None]\") PreGroup.add_argument( \"--snrh\", action=\"store\", type=float,", "parser.error( \"Cannot construct UTCDateTime from start time: \" + args.startT) else: args.startT =", "UTCDateTime(year+'-'+month+'-'+day) if dateUTC > tstart and dateUTC < tend: # Load meta data", "try: hkstack = HkStack(rfRstream, rfV2=rfRstream_copy, strike=args.strike, dip=args.dip, vp=args.vp) except: hkstack = HkStack(rfRstream, strike=args.strike,", "type=float, dest=\"cc\", default=-1., help=\"Specify the CC threshold for extracting receiver functions. \" +", "type=float, dest=\"binlim\", default=1, help=\"Specify the minimum number of RFs in each bin. [Default", "is used for data processing on-the-fly (requires web connection) \"\"\" parser = ArgumentParser(", "including\" + \"bounds on search, weights, type of stacking, etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\",", "ModelGroup = parser.add_argument_group( title='Model Settings', description=\"Miscellaneous default values and settings\") ModelGroup.add_argument( \"--vp\", action=\"store\",", "used to process receiver function data \" + \"for H-k stacking.\") # General", "as np import pickle import stdb from obspy.clients.fdsn import Client from obspy.core import", "UTCDateTime(args.startT) except: parser.error( \"Cannot construct UTCDateTime from start time: \" + args.startT) else:", "a list of two floats with minimum and maximum\" + \"bounds on Vp/Vs", "Partial keys will \" + \"be used to match against those in the", "continue if meta.cc < args.cc: continue ''' # Check bounds on data #", "phase name to plot. 
\" + \"Options are 'P', 'PP', 'allP', 'S', 'SKS'", "= binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1, pws=args.pws) rfRstream = rf_tmp[0] else: rf_tmp = binning.bin(rfRstream, typ='slow',", "return args def main(): print() print(\"#########################################\") print(\"# __ _ _ #\") print(\"# _", "| | | '_ \\| |/ / #\") print(\"# | | | _|", "in the Software without restriction, including without limitation the rights # to use,", "should contain 2 \" + \"comma-separated floats\") if args.kbound is None: args.kbound =", "dipping Moho. [Default None]\") PlotGroup = parser.add_argument_group( title='Settings for plotting results', description=\"Specify parameters", "ModelGroup.add_argument( \"--vp\", action=\"store\", type=float, dest=\"vp\", default=6.0, help=\"Specify mean crustal Vp (km/s). [Default 6.0]\")", "default=False, help=\"Force folder names to use long-key form (NET.STN.CHN). \" + \"Default behaviour", "+ \"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\", type=int, default=36, help=\"Specify integer number of", "QC Thresholding if meta.snrh < args.snrh: continue if meta.snr < args.snr: continue if", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "args.bp_copy = sorted(args.bp_copy) if (len(args.bp_copy)) != 2: parser.error( \"Error: --bp_copy should contain 2", "> sta.enddate or tend < sta.startdate: continue # Temporary print locations tlocs =", "\"--save\", action=\"store_true\", dest=\"save\", default=False, help=\"Set this option to save the HkStack object to", "values in stacks. 
[Default 'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\", default=False, help=\"Set this option", "args.plot: hkstack.plot(args.save_plot, args.title, args.form) if args.save: filename = savepath / (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\")", "default=-9999, help=\"Specify the horizontal component SNR threshold for \" + \"extracting receiver functions.", "file.close() if rfdata[0].stats.npts != 1451: print(folder) if len(rfRstream) == 0: continue if args.no_outl:", "not produce plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set this option to save", "\"Error: --slowbound should contain 2 \" + \"comma-separated floats\") if args.bazbound is None:", "comma separated list of station keys for \" + \"which to perform the", "Define save path if args.save: savepath = Path('HK_DATA') / stfld if not savepath.is_dir():", "'+str(savepath)+' doesn`t exist - creating it') savepath.mkdir(parents=True) # Get search start time if", "+ \"matplotlib formats: 'png', 'jpg', 'eps', 'pdf'. [Default 'png']\") args = parser.parse_args(argv) #", "overwriting of pre-existing data. \" + \"[Default False]\") parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\",", "| |__ | | __ #\") print(\"# | '__| |_| '_ \\| |", "' + str(datapath) + ' doesn`t exist - continuing') continue # Define save", "of two floats with minimum and maximum\" + \"bounds on Vp/Vs (k). [Default", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "if args.phase == 'allP': args.listphase = ['P', 'PP'] elif args.phase == 'allS': args.listphase", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL", "+ \" [Default False]\") PreGroup.add_argument( \"--phase\", action=\"store\", type=str, dest=\"phase\", default='allP', help=\"Specify the phase", "dest=\"strike\", default=None, help=\"Specify the strike of dipping Moho. [Default None]\") ModelGroup.add_argument( \"--dip\", action=\"store\",", "elif args.phase == 'allS': args.listphase = ['S', 'SKS'] else: args.listphase = [args.phase] if", "stack. [Default [0.5, 2., -1.]]\") HKGroup.add_argument( \"--type\", action=\"store\", type=str, dest=\"typ\", default=\"sum\", help=\"Specify type", "\"--bp\", action=\"store\", type=str, dest=\"bp\", default=None, help=\"Specify the corner frequencies for the bandpass filter.", "nan def get_hk_arguments(argv=None): \"\"\" Get Options from :class:`~optparse.OptionParser` objects. This function is used", "0: try: args.startT = UTCDateTime(args.startT) except: parser.error( \"Cannot construct UTCDateTime from start time:", "None: args.slowbound = [0.04, 0.08] else: args.slowbound = [float(val) for val in args.slowbound.split(',')]", "varR = [] for i in range(len(rfRstream)): taxis = rfRstream[i].stats.taxis tselect = (taxis", "# Update attributes hkstack.hbound = args.hbound hkstack.kbound = args.kbound hkstack.dh = args.dh hkstack.dk", "import numpy as np import pickle import stdb from obspy.clients.fdsn import Client from", "default=None, help=\"Specify a list of two floats with minimum and maximum\" + \"bounds", "Database db, stkeys = stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track processed folders procfold = []", "2 \" + \"comma-separated floats\") if args.hbound is None: args.hbound = [20., 50.]", "#\") print(\"# | | | _| |_) | |_| | | | |", "and/or sell # copies of the Software, and to permit persons to whom", "strike=args.strike, dip=args.dip, vp=args.vp) except: hkstack = HkStack(rfRstream, strike=args.strike, dip=args.dip, vp=args.vp) # Update attributes", "'allP']: datapath = Path('P_DATA') / stfld elif args.phase in ['S', 'SKS', 
'allS']: datapath", "the minimum number of RFs in each bin. [Default 3]\") PreGroup.add_argument( \"--bp\", action=\"store\",", "parser.add_argument_group( title=\"Time Settings\", description=\"Settings associated with refining \" + \"the times to include", "datafiles = [x for x in datapath.iterdir() if x.is_dir()] for folder in datafiles:", "['P', 'PP', 'allP']: datapath = Path('P_DATA') / stfld elif args.phase in ['S', 'SKS',", "args.bp is None: args.bp = [0.05, 0.5] else: args.bp = [float(val) for val", "print(\"# _ __ / _|_ __ _ _ | |__ | | __", "file = open(filename, \"rb\") rfdata = pickle.load(file) rfRstream.append(rfdata[1]) file.close() if rfdata[0].stats.npts != 1451:", "minimum and maximum\" + \"bounds on slowness (s/km). [Default [0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\",", "\"Cannot construct UTCDateTime from start time: \" + args.startT) else: args.startT = None", "minimum and maximum\" + \"bounds on back azimuth (degrees). [Default [0, 360]]\") PreGroup.add_argument(", "\"-V\", \"--verbose\", action=\"store_true\", dest=\"verb\", default=False, help=\"Specify to increase verbosity.\") parser.add_argument( \"-O\", \"--overwrite\", action=\"store_true\",", "\"comma-separated floats\") if args.bazbound is None: args.bazbound = [0.0, 360.0] else: args.bazbound =", "def get_hk_arguments(argv=None): \"\"\" Get Options from :class:`~optparse.OptionParser` objects. This function is used for", "meta = pickle.load(open(metafile, 'rb')) # Skip data not in list of phases if", "+ \"the start time for the search. 
This will override any \" +", "sorted(args.bazbound) if (len(args.bazbound)) != 2: parser.error( \"Error: --bazbound should contain 2 \" +", "print(' {0} already processed...skipping '.format(stfld)) continue rfRstream = Stream() datafiles = [x for", "> t1) & (taxis < t2) varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR) # Remove outliers", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "use phase-weighted stacking during binning \" + \" [Default False]\") PreGroup.add_argument( \"--phase\", action=\"store\",", "+ \"of positive values in stacks. [Default 'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\", default=False,", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. #", "args.listphase = ['S', 'SKS'] else: args.listphase = [args.phase] if args.typ not in ['sum',", "procfold: print(' {0} already processed...skipping '.format(stfld)) continue rfRstream = Stream() datafiles = [x", "maximum\" + \"bounds on Moho depth (H, in km). [Default [20., 50.]]\") HKGroup.add_argument(", "keys for stkey in list(stkeys): # Extract station information from dictionary sta =", "data \" + \"for H-k stacking.\") # General Settings parser.add_argument( \"indb\", help=\"Station Database", "\"--dk\", action=\"store\", type=float, dest=\"dk\", default=0.02, help=\"Specify search interval for k. 
[Default 0.02]\") HKGroup.add_argument(", "val in args.bp.split(',')] args.bp = sorted(args.bp) if (len(args.bp)) != 2: parser.error( \"Error: --bp", "stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1] # Define path to see if it exists if args.phase", "# # The above copyright notice and this permission notice shall be included", "--slowbound should contain 2 \" + \"comma-separated floats\") if args.bazbound is None: args.bazbound", "robustR = np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR > 2.5] for i in outliersR[::-1]: rfRstream.remove(rfRstream[i])", "\"Software\"), to deal # in the Software without restriction, including without limitation the", "args.weights = [0.5, 2.0, -1.0] else: args.weights = [float(val) for val in args.weights.split(',')]", "in stacks. [Default 'sum']\") HKGroup.add_argument( \"--save\", action=\"store_true\", dest=\"save\", default=False, help=\"Set this option to", "# Update processed folders procfold.append(stfld) if __name__ == \"__main__\": # Run main program", "default=None, help=\"Specify a list of three floats with for Ps, Pps and Pass", "args.form) if args.save: filename = savepath / (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename) # Update", "from os.path import exists as exist from numpy import nan def get_hk_arguments(argv=None): \"\"\"", "== 'allP': args.listphase = ['P', 'PP'] elif args.phase == 'allS': args.listphase = ['S',", "path to see if it exists if args.phase in ['P', 'PP', 'allP']: datapath", "action=\"store\", type=float, dest=\"dh\", default=0.5, help=\"Specify search interval for H (km). 
[Default 0.5]\") HKGroup.add_argument(", "if (len(args.bp)) != 2: parser.error( \"Error: --bp should contain 2 \" + \"comma-separated", "should contain 2 \" + \"comma-separated floats\") ## JMG ## if args.phase not", "continue meta = pickle.load(open(metafile, 'rb')) # Skip data not in list of phases", "parser.error( \"Error: --slowbound should contain 2 \" + \"comma-separated floats\") if args.bazbound is", "threshold for extracting receiver functions. \" + \"[Default None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\",", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "behaviour uses short key form (NET.STN) for the folder \" + \"names, regardless", "distribute, sublicense, and/or sell # copies of the Software, and to permit persons", "action=\"store\", type=float, dest=\"strike\", default=None, help=\"Specify the strike of dipping Moho. [Default None]\") ModelGroup.add_argument(", "# Check inputs if not exist(args.indb): parser.error(\"Input file \" + args.indb + \"", "in outliersR[::-1]: rfRstream.remove(rfRstream[i]) print('') print(\"Number of radial RF data: \" + str(len(rfRstream))) print('')", "form (NET.STN) for the folder \" + \"names, regardless of the key type", "\" + \"does not produce plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set this", "stream is too short if len(rfRstream) < 5: continue if args.save_plot and not", "= UTCDateTime(args.startT) except: parser.error( \"Cannot construct UTCDateTime from start time: \" + args.startT)", "+ ' doesn`t exist - continuing') continue # Define save path if args.save:", "\"--vp\", action=\"store\", type=float, dest=\"vp\", default=6.0, help=\"Specify mean crustal Vp (km/s). [Default 6.0]\") ModelGroup.add_argument(", "|__ | | __ #\") print(\"# | '__| |_| '_ \\| | |", "+ \"bounds on slowness (s/km). 
[Default [0.04, 0.08]]\") PreGroup.add_argument( \"--bazbound\", action=\"store\", dest=\"bazbound\", type=str,", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "floats with minimum and maximum\" + \"bounds on Moho depth (H, in km).", "end time: \" + args.endT) else: args.endT = None if args.strike is None", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "see if it exists if args.phase in ['P', 'PP', 'allP']: datapath = Path('P_DATA')", "['S', 'SKS', 'allS']: datapath = Path('S_DATA') / stfld if not datapath.is_dir(): print('Path to", "names to use long-key form (NET.STN.CHN). \" + \"Default behaviour uses short key", "not in ['P', 'PP', 'allP', 'S', 'SKS', 'allS']: parser.error( \"Error: choose between 'P',", "(taxis < t2) varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR) # Remove outliers wrt variance within", "action=\"store\", type=float, dest=\"dk\", default=0.02, help=\"Specify search interval for k. [Default 0.02]\") HKGroup.add_argument( \"--weights\",", "description=\"Miscellaneous default values and settings\") ModelGroup.add_argument( \"--vp\", action=\"store\", type=float, dest=\"vp\", default=6.0, help=\"Specify mean", "and args.dip is None: args.calc_dip = False args.nbaz = None elif args.strike is", "\" + str(len(rfRstream))) print('') # Try binning if specified if args.calc_dip: rf_tmp =", "the variance. [Default False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\", dest=\"slowbound\", type=str, default=None, help=\"Specify a list", "two floats with minimum and maximum\" + \"bounds on slowness (s/km). 
[Default [0.04,", "# create station key list if len(args.stkeys) > 0: args.stkeys = args.stkeys.split(',') #", "datafiles: # Skip hidden folders if folder.name.startswith('.'): continue date = folder.name.split('_')[0] year =", "__ _ _ | |__ | | __ #\") print(\"# | '__| |_|", "type of stacking, etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\", type=str, dest=\"hbound\", default=None, help=\"Specify a list", "None: args.kbound = [1.56, 2.1] else: args.kbound = [float(val) for val in args.kbound.split(',')]", "data. \" + \"[Default False]\") parser.add_argument( \"-L\", \"--long-name\", action=\"store_true\", dest=\"lkey\", default=False, help=\"Force folder", "string representing \" + \"the start time for the search. This will override", "= [float(val) for val in args.kbound.split(',')] args.kbound = sorted(args.kbound) if (len(args.kbound)) != 2:", "help=\"Specify a comma separated list of station keys for \" + \"which to", "2019 <NAME> # # This file is part of RfPy. # # Permission", "+ \"[Default False]\") PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str, default=None, help=\"Specify a list of", "\" + \"weights in final stack. [Default [0.5, 2., -1.]]\") HKGroup.add_argument( \"--type\", action=\"store\",", "+ \"contained within the station database. Partial keys will \" + \"be used", "action=\"store_true\", dest=\"save_plot\", default=False, help=\"Set this option to save the plot [Default doesn't save]\")", "-1.]]\") HKGroup.add_argument( \"--type\", action=\"store\", type=str, dest=\"typ\", default=\"sum\", help=\"Specify type of final stacking. Options", "the station database. 
Partial keys will \" + \"be used to match against", "else: args.bazbound = [float(val) for val in args.bazbound.split(',')] args.bazbound = sorted(args.bazbound) if (len(args.bazbound))", "False args.nbaz = None elif args.strike is None or args.dip is None: parser.error(\"Specify", "\"--copy\", action=\"store_true\", dest=\"copy\", default=False, help=\"Set this option to use a copy of the", "portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "do so, subject to the following conditions: # # The above copyright notice", "description=\"Settings associated with refining \" + \"the times to include in searching for", "# Construct Folder Name stfld = stkey if not args.lkey: stfld = stkey.split('.')[0]+\".\"+stkey.split('.')[1]", "mean crustal Vp (km/s). [Default 6.0]\") ModelGroup.add_argument( \"--strike\", action=\"store\", type=float, dest=\"strike\", default=None, help=\"Specify", "to save the plot [Default doesn't save]\") PlotGroup.add_argument( \"--title\", action=\"store\", type=str, dest=\"title\", default=\"\",", "if rfdata[0].stats.npts != 1451: print(folder) if len(rfRstream) == 0: continue if args.no_outl: t1", "| .__/ \\__, |___|_| |_|_|\\_\\ #\") print(\"# |_| |___/_____| #\") print(\"# #\") print(\"#########################################\")", "the key type of the database.\" ) # Event Selection Criteria TimeGroup =", "rf_tmp[0] # Get a copy of the radial component and filter if args.copy:", "on the MAD \" + \"on the variance. 
[Default False]\") PreGroup.add_argument( \"--slowbound\", action=\"store\",", "permit persons to whom the Software is # furnished to do so, subject", "print(\"|===============================================|\") print(\"|===============================================|\") print(\"| Station: {0:>2s}.{1:5s} |\".format( sta.network, sta.station)) print(\"| Channel: {0:2s}; Locations: {1:15s}", "= 1.4826*np.median(np.abs(varR-medvarR)) robustR = np.abs((varR-medvarR)/madvarR) outliersR = np.arange(len(rfRstream))[robustR > 2.5] for i in", "help=\"Specify the strike of dipping Moho. [Default None]\") ModelGroup.add_argument( \"--dip\", action=\"store\", type=float, dest=\"dip\",", "This will override any \" + \"station end times [Default end date of", "help=\"Specify a list of two floats with minimum and maximum\" + \"bounds on", "in datafiles: # Skip hidden folders if folder.name.startswith('.'): continue date = folder.name.split('_')[0] year", "None]\") PreGroup.add_argument( \"--no-outlier\", action=\"store_true\", dest=\"no_outl\", default=False, help=\"Set this option to delete outliers based", "search start time if args.startT is None: tstart = sta.startdate else: tstart =", "\"--slowbound\", action=\"store\", dest=\"slowbound\", type=str, default=None, help=\"Specify a list of two floats with minimum", "= args.dh hkstack.dk = args.dk hkstack.weights = args.weights # Stack with or without", "= folder / \"RF_Data.pkl\" if filename.is_file(): file = open(filename, \"rb\") rfdata = pickle.load(file)", "to use a copy of the radial component \" + \"filtered at different", "= args.stkeys.split(',') # construct start time if len(args.startT) > 0: try: args.startT =", "dest=\"dh\", default=0.5, help=\"Specify search interval for H (km). [Default 0.5]\") HKGroup.add_argument( \"--kbound\", action=\"store\",", "type=float, dest=\"dh\", default=0.5, help=\"Specify search interval for H (km). 
[Default 0.5]\") HKGroup.add_argument( \"--kbound\",", "type=str, dest=\"typ\", default=\"sum\", help=\"Specify type of final stacking. Options are: 'sum' for \"", "title='Model Settings', description=\"Miscellaneous default values and settings\") ModelGroup.add_argument( \"--vp\", action=\"store\", type=float, dest=\"vp\", default=6.0,", "= stdb.io.load_db(fname=args.indb, keys=args.stkeys) # Track processed folders procfold = [] # Loop over", "PreGroup.add_argument( \"--bp-copy\", action=\"store\", dest=\"bp_copy\", type=str, default=None, help=\"Specify a list of two floats with", "for plotting the H-k stacks.\") PlotGroup.add_argument( \"--plot\", action=\"store_true\", dest=\"plot\", default=False, help=\"Set this option", "if len(rfRstream) == 0: continue if args.no_outl: t1 = 0. t2 = 30.", "dest=\"pws\", default=False, help=\"Set this option to use phase-weighted stacking during binning \" +", "tstart > sta.enddate or tend < sta.startdate: continue # Temporary print locations tlocs", "Settings parser.add_argument( \"indb\", help=\"Station Database to process from.\", type=str) parser.add_argument( \"--keys\", action=\"store\", type=str,", "rfRstream_copy.filter('bandpass', freqmin=args.bp_copy[0], freqmax=args.bp_copy[1], corners=2, zerophase=True) # Check bin counts: for tr in rfRstream:", "else: args.bp = [float(val) for val in args.bp.split(',')] args.bp = sorted(args.bp) if (len(args.bp))", "for folder already processed if stfld in procfold: print(' {0} already processed...skipping '.format(stfld))", "[float(val) for val in args.hbound.split(',')] args.hbound = sorted(args.hbound) if (len(args.hbound)) != 2: parser.error(", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS", "\"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\", type=int, default=36, help=\"Specify integer number of back-azimuth", "2.1] else: args.kbound = [float(val) for val in args.kbound.split(',')] args.kbound = sorted(args.kbound) if", "< #\") print(\"# |_| |_| | .__/ \\__, |___|_| |_|_|\\_\\ #\") print(\"# |_|", "hkstack.hbound = args.hbound hkstack.kbound = args.kbound hkstack.dh = args.dh hkstack.dk = args.dk hkstack.weights", "'SKS' and 'allS'.\") if args.phase == 'allP': args.listphase = ['P', 'PP'] elif args.phase", "dest=\"endT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \" + \"the end time", "## JMG ## if args.slowbound is None: args.slowbound = [0.04, 0.08] else: args.slowbound", "date = folder.name.split('_')[0] year = date[0:4] month = date[4:6] day = date[6:8] dateUTC", "filter. \" + \"[Default 0.05,0.5]\") PreGroup.add_argument( \"--nbaz\", action=\"store\", dest=\"nbaz\", type=int, default=36, help=\"Specify integer", "zerophase=True) # Check bin counts: for tr in rfRstream: if (tr.stats.nbin < args.binlim):", "[20., 50.]]\") HKGroup.add_argument( \"--dh\", action=\"store\", type=float, dest=\"dh\", default=0.5, help=\"Specify search interval for H", "maximum\" + \"bounds on back azimuth (degrees). [Default [0, 360]]\") PreGroup.add_argument( \"--pws\", action=\"store_true\",", "== 0: continue if args.no_outl: t1 = 0. t2 = 30. varR =", "(k). [Default [1.56, 2.1]]\") HKGroup.add_argument( \"--dk\", action=\"store\", type=float, dest=\"dk\", default=0.02, help=\"Specify search interval", "the dip of dipping Moho. 
[Default None]\") PlotGroup = parser.add_argument_group( title='Settings for plotting", "station keys for stkey in list(stkeys): # Extract station information from dictionary sta", "list of two floats with minimum and maximum\" + \"frequency for the copies", "<reponame>wsja/RfPy<filename>rfpy/scripts/rfpy_hk.py #!/usr/bin/env python # Copyright 2019 <NAME> # # This file is part", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "binning \" + \" [Default False]\") PreGroup.add_argument( \"--phase\", action=\"store\", type=str, dest=\"phase\", default='allP', help=\"Specify", "# Event Selection Criteria TimeGroup = parser.add_argument_group( title=\"Time Settings\", description=\"Settings associated with refining", "= parser.add_argument_group( title=\"Time Settings\", description=\"Settings associated with refining \" + \"the times to", "list of two floats with minimum and maximum\" + \"bounds on Vp/Vs (k).", "between 'sum' and 'product'\") if args.copy: if args.bp_copy is None: args.bp_copy = [0.05,", "t2) varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR) # Remove outliers wrt variance within time range", "= [float(val) for val in args.weights.split(',')] if (len(args.weights)) != 3: parser.error( \"Error: --weights", "for x in datapath.iterdir() if x.is_dir()] for folder in datafiles: # Skip hidden", "plotting, HkStack from pathlib import Path from argparse import ArgumentParser from os.path import", "contain 2 \" + \"comma-separated floats\") if args.kbound is None: args.kbound = [1.56,", "(taxis > t1) & (taxis < t2) varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR) # Remove", "save the plot [Default doesn't save]\") PlotGroup.add_argument( \"--title\", action=\"store\", type=str, dest=\"title\", default=\"\", help=\"Specify", "\" + \"[Default 36]\") PreGroup.add_argument( \"--nslow\", action=\"store\", dest=\"nslow\", type=int, default=40, help=\"Specify integer number", "dateUTC < tend: # Load meta 
data metafile = folder / \"Meta_Data.pkl\" if", "Update Display print(\" \") print(\" \") print(\"|===============================================|\") print(\"|===============================================|\") print(\"| {0:>8s} |\".format( sta.station)) print(\"|===============================================|\")", "= Path('HK_DATA') / stfld if not savepath.is_dir(): print('Path to '+str(savepath)+' doesn`t exist -", "function is used for data processing on-the-fly (requires web connection) \"\"\" parser =", "# construct end time if len(args.endT) > 0: try: args.endT = UTCDateTime(args.endT) except:", "= [float(val) for val in args.bp.split(',')] args.bp = sorted(args.bp) if (len(args.bp)) != 2:", "< t2) varR.append(np.var(rfRstream[i].data[tselect])) varR = np.array(varR) # Remove outliers wrt variance within time", "# Import modules and functions import numpy as np import pickle import stdb", "description=\"Specify parameters of H-k search, including\" + \"bounds on search, weights, type of", "tend < sta.startdate: continue # Temporary print locations tlocs = sta.location if len(tlocs)", "[Default [0.5, 2., -1.]]\") HKGroup.add_argument( \"--type\", action=\"store\", type=str, dest=\"typ\", default=\"sum\", help=\"Specify type of", "stacks [Default \" + \"does not produce plot]\") PlotGroup.add_argument( \"--save-plot\", action=\"store_true\", dest=\"save_plot\", default=False,", "0: tlocs[il] = \"--\" sta.location = tlocs # Update Display print(\" \") print(\"", "\"filtered at different corners for the Pps and Pss phases. \" + \"[Default", "times. 
[Default start date of station]\") TimeGroup.add_argument( \"--end\", action=\"store\", type=str, dest=\"endT\", default=\"\", help=\"Specify", "print('') # Try binning if specified if args.calc_dip: rf_tmp = binning.bin_baz_slow(rfRstream, nbaz=args.nbaz+1, nslow=args.nslow+1,", "_ | |__ | | __ #\") print(\"# | '__| |_| '_ \\|", "and 'product'\") if args.copy: if args.bp_copy is None: args.bp_copy = [0.05, 0.35] else:", "args.stkeys = args.stkeys.split(',') # construct start time if len(args.startT) > 0: try: args.startT", "floats\") return args def main(): print() print(\"#########################################\") print(\"# __ _ _ #\") print(\"#", "if len(args.endT) > 0: try: args.endT = UTCDateTime(args.endT) except: parser.error( \"Cannot construct UTCDateTime", "__ / _|_ __ _ _ | |__ | | __ #\") print(\"#", "perform the analysis. These must be \" + \"contained within the station database.", "function \" + \"data prior to H-k stacking\") PreGroup.add_argument( \"--binlim\", action=\"store\", type=float, dest=\"binlim\",", "of RFs in each bin. [Default 3]\") PreGroup.add_argument( \"--bp\", action=\"store\", type=str, dest=\"bp\", default=None,", "TimeGroup.add_argument( \"--end\", action=\"store\", type=str, dest=\"endT\", default=\"\", help=\"Specify a UTCDateTime compatible string representing \"", "hereby granted, free of charge, to any person obtaining a copy # of", "database. Partial keys will \" + \"be used to match against those in", "\"Options are 'P', 'PP', 'allP', 'S', 'SKS' or 'allS'. \" + \"[Default 'allP']\")", "hkstack.save(file=filename) # Update processed folders procfold.append(stfld) if __name__ == \"__main__\": # Run main", "\"--dh\", action=\"store\", type=float, dest=\"dh\", default=0.5, help=\"Specify search interval for H (km). 
[Default 0.5]\")", "hkstack.plot(args.save_plot, args.title, args.form) if args.save: filename = savepath / (hkstack.rfV1[0].stats.station + \".hkstack.\"+args.typ+\".pkl\") hkstack.save(file=filename)", "\" + \"names, regardless of the key type of the database.\" ) #", "date[0:4] month = date[4:6] day = date[6:8] dateUTC = UTCDateTime(year+'-'+month+'-'+day) if dateUTC >", "of stacking, etc.\") HKGroup.add_argument( \"--hbound\", action=\"store\", type=str, dest=\"hbound\", default=None, help=\"Specify a list of", "args.weights.split(',')] if (len(args.weights)) != 3: parser.error( \"Error: --weights should contain 3 \" +", "args.no_outl: t1 = 0. t2 = 30. varR = [] for i in", "hkstack.dk = args.dk hkstack.weights = args.weights # Stack with or without dip if", "JMG ## if args.phase not in ['P', 'PP', 'allP', 'S', 'SKS', 'allS']: parser.error(", "len(rfRstream) < 5: continue if args.save_plot and not Path('HK_PLOTS').is_dir(): Path('HK_PLOTS').mkdir(parents=True) print('') print(\"Number of", "\" + \"Options are 'P', 'PP', 'allP', 'S', 'SKS' or 'allS'. \" +", "if tstart > sta.enddate or tend < sta.startdate: continue # Temporary print locations", "as exist from numpy import nan def get_hk_arguments(argv=None): \"\"\" Get Options from :class:`~optparse.OptionParser`", "copies stream (Hz). [Default [0.05, 0.35]]\") HKGroup = parser.add_argument_group( title='Settings for H-k Stacking',", "meta.slow > args.slowbound[1]: # continue # if meta.baz < args.bazbound[0] and meta.baz >", "tend = args.endT if tstart > sta.enddate or tend < sta.startdate: continue #", "args def main(): print() print(\"#########################################\") print(\"# __ _ _ #\") print(\"# _ __" ]
[ "= [SIZE] # SIZE == 40 [SIZE] self.y = [SIZE] self.dead = False", "class Apple: def __init__(self, parent_screen): self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen self.x =", "class Snake: def __init__(self, parent_screen): self.parent_screen = parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction =", "self.snake2.walk() print(\"BAKUGOU ALIVE\") # self.snake2.walk() # TODO: Make the apple become two self.apple1.draw()", "self.snake2.x = [40] self.snake2.y = [40] self.snake1.draw() self.snake2.draw() self.snake1.dead = False self.snake2.dead =", "become two self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip() # for the screen update # snake", "self.direction = 'right' def move_up(self): self.direction = 'up' def move_down(self): self.direction = 'down'", "= Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self): # todo spend times read", "True: print(\"default true ?? \") raise \"Collision Occured\" if self.snake1.dead == False: self.snake1.walk()", "(200, 350)) pygame.mixer.music.pause() pygame.display.flip() def run(self): running = True pause = False while", "False if event.key == K_RETURN: pygame.mixer.music.unpause() pause = False if not pause: #", "the blue one. # for initialize the two snakes's pictures. self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert()", "= -10000000000 - 1000 * i self.snake1.y[i] = -10000000000 - 1000 * i", "40 [SIZE] self.y = [SIZE] self.dead = False def move_left(self): self.direction = 'left'", "class Game: def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake And Apple Game\") pygame.mixer.init() self.play_background_music() self.surface", "== 'right': self.x[0] += SIZE elif self.direction == 'up': self.y[0] -= SIZE elif", "two snakes's pictures. 
self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize the", "todo make these sankes over the window # snake colliding with itself if", "self.snake2.dead = True # TODO if snake2's head eats snake1 it becomes bigger", "head eats snake1 it becomes bigger if self.snake1.dead == False and self.snake2.dead ==", "line1 = font.render(f\"Game is over! Your score is {self.snake1.length + self.snake2.length}\", True, WHITE_COLOR)", "becomes bigger if self.snake1.dead == False and self.snake2.dead == False: for i in", "= pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize the two snakes' positions self.snake1.x", "self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake2's head eats snake1 it", "= 40 BACKGROUND_COLOR = (110, 110, 5) WHITE_COLOR = (255, 255, 255) class", "24) * SIZE self.y = random.randint(1, 19) * SIZE def draw(self): self.parent_screen.blit(self.image, (self.x,", "read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def play_sound(self, sound_name): if sound_name == \"crash\": sound", "in range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO", "= pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 =", "WHITE_COLOR = (255, 255, 255) class Apple: def __init__(self, parent_screen): self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert()", "apples self.apple1 = Apple(self.surface) self.apple1.image = 
pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 = Apple(self.surface) self.apple2.image =", "= random.randint(1, 19) * SIZE def draw(self): self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip() def move(self):", "if event.key == K_LEFT: self.snake1.move_left() if event.key == K_RIGHT: self.snake1.move_right() if event.key ==", "in range(3, self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True if", "self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause() pygame.display.flip() def run(self): running = True pause = False", "# todo for the two apples self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw()", "print(\"go expection\") if self.snake1.dead == True and self.snake2.dead == True: print(\"default true ??", "0) def play_sound(self, sound_name): if sound_name == \"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif", "self.apple1.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0],", "self.y = random.randint(1, 19) * SIZE def draw(self): self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip() def", "False try: if not pause: self.play() except Exception as e: print(e) print(\"raise exception\")", "time import random SIZE = 40 BACKGROUND_COLOR = (110, 110, 5) WHITE_COLOR =", "K_RETURN: pygame.mixer.music.unpause() pause = False if not pause: # for the first snake", "+ self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1, (200, 300)) # blit(source, postion) line2 = 
font.render(\"To", "\"Collision Occured\" if self.snake1.dead == False: self.snake1.walk() print(\"DEKU ALIVE\") if self.snake2.dead == False:", "self.length += 1 self.x.append(-1) self.y.append(-1) class Game: def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake And", "# for the blue one. # for initialize the two snakes's pictures. self.snake1.image", "postion) line2 = font.render(\"To play again press Enter. To exit press Escape!\", True,", "self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead = True for i in range(self.snake1.length): self.snake1.x[i] = -10000000000 -", "K_w: self.snake2.move_up() if event.key == K_s: self.snake2.move_down() elif event.type == QUIT: running =", "255) class Apple: def __init__(self, parent_screen): self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen self.x", "= -10000000000 - 1000 * i if self.snake2.dead == False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]):", "== K_s: self.snake2.move_down() elif event.type == QUIT: running = False try: if not", "pygame.mixer.init() self.play_background_music() self.surface = pygame.display.set_mode((1000, 800)) self.snake1 = Snake(self.surface) # for the yellow", "self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0],", "for i in range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True", "range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if", "random.randint(1, 24) * SIZE self.y = random.randint(1, 19) * SIZE class Snake: def", "the 
window # snake colliding with itself if self.snake1.dead == False: for i", "self.snake1.x[i] = -10000000000 - 1000 * i self.snake1.y[i] = -10000000000 - 1000 *", "== QUIT: running = False try: if not pause: self.play() except Exception as", "* SIZE class Snake: def __init__(self, parent_screen): self.parent_screen = parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert()", "range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i], self.y[i]) pygame.display.update() def increase_length(self): self.length += 1 self.x.append(-1)", "self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True if self.snake2.dead ==", "the yellow one. self.snake2 = Snake(self.surface) # for the blue one. # for", "[40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.draw() self.snake2.draw() self.snake1.dead = False self.snake2.dead", "boundaries # collides with boundaries. 
if self.snake1.dead == False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\")", "self.snake1.increase_length() self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw() #", "= True # TODO if snake1's head eats snake2 it becomes bigger if", "pygame.init() pygame.display.set_caption(\"Codebasics Snake And Apple Game\") pygame.mixer.init() self.play_background_music() self.surface = pygame.display.set_mode((1000, 800)) self.snake1", "snake # if self.snake2.dead == True: # print(\"go expection\") if self.snake1.dead == True", "self.x.append(-1) self.y.append(-1) class Game: def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake And Apple Game\") pygame.mixer.init()", "Exception as e: print(e) print(\"raise exception\") self.show_game_over() pause = True self.reset() # self.snake.walk()", "= [SIZE] self.dead = False def move_left(self): self.direction = 'left' def move_right(self): self.direction", "- 1000 * i def display_score(self): font = pygame.font.SysFont('arial', 30) score = font.render(f\"Score:", "40 BACKGROUND_COLOR = (110, 110, 5) WHITE_COLOR = (255, 255, 255) class Apple:", "for the two apples self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 =", "parent_screen): self.parent_screen = parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down' self.length = 1", "self.play_sound(\"crash\") self.snake2.dead = True for i in range(self.snake2.length): self.snake2.x[i] = -10000000000 - 1000", "if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw() if 
self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x,", "todo for the two apples self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2", "TODO: Make the apple become two self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip() # for the", "with boundaries. if self.snake1.dead == False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead = True", "boundaries. if self.snake1.dead == False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead = True for", "== K_d: self.snake2.move_right() if event.key == K_w: self.snake2.move_up() if event.key == K_s: self.snake2.move_down()", "self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\")", "import pygame from pygame.locals import * import time import random SIZE = 40", "self.render_background() # for the two snake # if self.snake2.dead == True: # print(\"go", "not pause: self.play() except Exception as e: print(e) print(\"raise exception\") self.show_game_over() pause =", "1 self.x.append(-1) self.y.append(-1) class Game: def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake And Apple Game\")", "False def render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0)) def play(self): self.render_background() #", "the two snake # if self.snake2.dead == True: # print(\"go expection\") if self.snake1.dead", "== False: if event.key == K_a: self.snake2.move_left() if event.key == K_d: self.snake2.move_right() if", "[40] self.snake2.y = [40] self.snake1.draw() self.snake2.draw() self.snake1.dead = False 
self.snake2.dead = False #", "if both's head eats it becomes bigger # snake colliding with the wall", "while running: for event in pygame.event.get(): if event.type == KEYDOWN: if event.key ==", "parent_screen self.x = random.randint(1, 24) * SIZE self.y = random.randint(1, 19) * SIZE", "SIZE elif self.direction == 'right': self.x[0] += SIZE elif self.direction == 'up': self.y[0]", "docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def play_sound(self, sound_name): if sound_name == \"crash\": sound =", "'right' def move_up(self): self.direction = 'up' def move_down(self): self.direction = 'down' def walk(self):", "== True and self.snake2.dead == True: print(\"default true ?? \") raise \"Collision Occured\"", "exit press Escape!\", True, WHITE_COLOR) self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause() pygame.display.flip() def run(self): running", "= pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1, y1, x2, y2): if x1 >= x2 and", "self.snake1.dead = True for i in range(self.snake1.length): self.snake1.x[i] = -10000000000 - 1000 *", "= Snake(self.surface) # for the blue one. 
# for initialize the two snakes's", "pygame.display.update() def increase_length(self): self.length += 1 self.x.append(-1) self.y.append(-1) class Game: def __init__(self): pygame.init()", "self.snake2.move_up() if event.key == K_s: self.snake2.move_down() elif event.type == QUIT: running = False", "self.play_background_music() self.surface = pygame.display.set_mode((1000, 800)) self.snake1 = Snake(self.surface) # for the yellow one.", "two snakes' positions self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y", "return True return False def collide_boundaries(self, x, y): if x > 1000 or", "eats snake1 it becomes bigger if self.snake1.dead == False and self.snake2.dead == False:", "self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1, y1, x2, y2): if", "False: self.snake1.walk() print(\"DEKU ALIVE\") if self.snake2.dead == False: self.snake2.walk() print(\"BAKUGOU ALIVE\") # self.snake2.walk()", "== False and self.snake2.dead == False: for i in range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0],", "font.render(f\"Game is over! Your score is {self.snake1.length + self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1, (200,", "# for the first snake if self.snake1.dead == False: if event.key == K_LEFT:", "running = False if event.key == K_RETURN: pygame.mixer.music.unpause() pause = False if not", "self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y):", "initialize the two snakes's pictures. 
self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() # for", "= False def move_left(self): self.direction = 'left' def move_right(self): self.direction = 'right' def", "print(\"not collide\") return False def render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0)) def", "(self.x[i], self.y[i])) print(self.x[i], self.y[i]) pygame.display.update() def increase_length(self): self.length += 1 self.x.append(-1) self.y.append(-1) class", "and self.snake2.dead == False: for i in range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]):", "self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw() # todo make", "def display_score(self): font = pygame.font.SysFont('arial', 30) score = font.render(f\"Score: {self.snake1.length + self.snake2.length}\", True,", "== KEYDOWN: if event.key == K_ESCAPE: running = False if event.key == K_RETURN:", "= random.randint(1, 24) * SIZE self.y = random.randint(1, 19) * SIZE def draw(self):", "is_collision(self, x1, y1, x2, y2): if x1 >= x2 and x1 < x2", "self.snake2.dead = True for i in range(self.snake2.length): self.snake2.x[i] = -10000000000 - 1000 *", "ALIVE\") if self.snake2.dead == False: self.snake2.walk() print(\"BAKUGOU ALIVE\") # self.snake2.walk() # TODO: Make", "def move_right(self): self.direction = 'right' def move_up(self): self.direction = 'up' def move_down(self): self.direction", "= True for i in range(self.snake2.length): self.snake2.x[i] = -10000000000 - 1000 * i", "self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake2's head eats snake1 it becomes", "== 'down': self.y[0] += SIZE self.draw() def draw(self): for i in range(self.length): 
self.parent_screen.blit(self.image,", "for i in range(3, self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead =", "self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if", "in range(self.snake1.length): self.snake1.x[i] = -10000000000 - 1000 * i self.snake1.y[i] = -10000000000 -", "1, 0, -1): self.x[i] = self.x[i - 1] self.y[i] = self.y[i - 1]", "pygame from pygame.locals import * import time import random SIZE = 40 BACKGROUND_COLOR", "it becomes bigger # snake colliding with the wall boundaries # collides with", "False # todo for the two apples self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert()", "False and self.snake2.dead == False: for i in range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i],", "self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen self.x = random.randint(1, 24) * SIZE self.y", "= pygame.display.set_mode((1000, 800)) self.snake1 = Snake(self.surface) # for the yellow one. 
self.snake2 =", "i in range(self.length - 1, 0, -1): self.x[i] = self.x[i - 1] self.y[i]", "[SIZE] # SIZE == 40 [SIZE] self.y = [SIZE] self.dead = False def", "pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def play_sound(self, sound_name): if sound_name == \"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\")", "\"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def reset(self): self.snake1 =", "pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert()", "if event.key == K_DOWN: self.snake1.move_down() # for the second snake if self.snake2.dead ==", "self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake1's head eats", "== False: for i in range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead", ">= x2 and x1 < x2 + SIZE: if y1 >= y2 and", "Occured\" if self.snake1.dead == False: self.snake1.walk() print(\"DEKU ALIVE\") if self.snake2.dead == False: self.snake2.walk()", "QUIT: running = False try: if not pause: self.play() except Exception as e:", "if event.key == K_w: self.snake2.move_up() if event.key == K_s: self.snake2.move_down() elif event.type ==", "19) * SIZE def draw(self): self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip() def move(self): self.x =", "snake1 it becomes bigger if self.snake1.dead == False and self.snake2.dead == False: for", "- 1] 
self.y[i] = self.y[i - 1] # update head if self.direction ==", "= [40] self.snake2.y = [40] self.snake1.draw() self.snake2.draw() self.snake1.dead = False self.snake2.dead = False", "self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y):", "= True pause = False while running: for event in pygame.event.get(): if event.type", "= 1 self.x = [SIZE] # SIZE == 40 [SIZE] self.y = [SIZE]", "# snake colliding with apple if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move()", "self.snake1.dead == False and self.snake2.dead == False: for i in range(self.snake2.length): if self.is_collision(self.snake1.x[0],", "self.apple2.draw() def play_background_music(self): # todo spend times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def", "self.snake2.dead == True: # print(\"go expection\") if self.snake1.dead == True and self.snake2.dead ==", "is over! 
Your score is {self.snake1.length + self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1, (200, 300))", "self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface) self.apple2.image", "y1, x2, y2): if x1 >= x2 and x1 < x2 + SIZE:", "30) score = font.render(f\"Score: {self.snake1.length + self.snake2.length}\", True, (200, 200, 200)) self.surface.blit(score, (850,", "== K_RETURN: pygame.mixer.music.unpause() pause = False if not pause: # for the first", "self.y)) pygame.display.flip() def move(self): self.x = random.randint(1, 24) * SIZE self.y = random.randint(1,", "self.play() except Exception as e: print(e) print(\"raise exception\") self.show_game_over() pause = True self.reset()", "y1 >= y2 and y1 < y2 + SIZE: return True return False", "False if not pause: # for the first snake if self.snake1.dead == False:", "300)) # blit(source, postion) line2 = font.render(\"To play again press Enter. 
To exit", "pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def reset(self): self.snake1 = Snake(self.surface) self.snake2 =", "the two apples self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 = Apple(self.surface)", "K_d: self.snake2.move_right() if event.key == K_w: self.snake2.move_up() if event.key == K_s: self.snake2.move_down() elif", "the apple become two self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip() # for the screen update", "= pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1, y1, x2,", "< y2 + SIZE: return True return False def collide_boundaries(self, x, y): if", "head if self.direction == 'left': self.x[0] -= SIZE elif self.direction == 'right': self.x[0]", "second snake if self.snake2.dead == False: if event.key == K_a: self.snake2.move_left() if event.key", "SIZE self.y = random.randint(1, 19) * SIZE def draw(self): self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip()", "SIZE def draw(self): self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip() def move(self): self.x = random.randint(1, 24)", "pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize the two snakes' positions self.snake1.x = [920] self.snake1.y =", "'down': self.y[0] += SIZE self.draw() def draw(self): for i in range(self.length): self.parent_screen.blit(self.image, (self.x[i],", "in pygame.event.get(): if event.type == KEYDOWN: if event.key == K_ESCAPE: running = False", "range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], 
self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True # TODO if", "True if self.snake2.dead == False: for i in range(3, self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0],", "True and self.snake2.dead == True: print(\"default true ?? \") raise \"Collision Occured\" if", "self.show_game_over() pause = True self.reset() # self.snake.walk() time.sleep(.1) if __name__ == \"__main__\": game", "110, 5) WHITE_COLOR = (255, 255, 255) class Apple: def __init__(self, parent_screen): self.image", "pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize the two snakes' positions self.snake1.x =", "print(\"BAKUGOU ALIVE\") # self.snake2.walk() # TODO: Make the apple become two self.apple1.draw() self.apple2.draw()", "todo spend times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def play_sound(self, sound_name): if sound_name", "Snake(self.surface) # for the blue one. 
# for initialize the two snakes's pictures.", "= True # TODO if both's head eats it becomes bigger # snake", "self.x = random.randint(1, 24) * SIZE self.y = random.randint(1, 19) * SIZE def", "pygame.font.SysFont('arial', 30) score = font.render(f\"Score: {self.snake1.length + self.snake2.length}\", True, (200, 200, 200)) self.surface.blit(score,", "pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen self.x = random.randint(1, 24) * SIZE self.y = random.randint(1,", "self.surface.blit(bg, (0, 0)) def play(self): self.render_background() # for the two snake # if", "== False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead = True for i in range(self.snake1.length):", "\"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif sound_name == \"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound)", "self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake1's head", "False self.snake2.dead = False self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40]", "play_background_music(self): # todo spend times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def play_sound(self, sound_name):", "range(3, self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO", "# snake colliding with the wall boundaries # collides with boundaries. 
if self.snake1.dead", "self.dead = False def move_left(self): self.direction = 'left' def move_right(self): self.direction = 'right'", "< x2 + SIZE: if y1 >= y2 and y1 < y2 +", "# snake colliding with itself if self.snake1.dead == False: for i in range(3,", "(200, 300)) # blit(source, postion) line2 = font.render(\"To play again press Enter. To", "WHITE_COLOR) self.surface.blit(line1, (200, 300)) # blit(source, postion) line2 = font.render(\"To play again press", "K_DOWN: self.snake1.move_down() # for the second snake if self.snake2.dead == False: if event.key", "'up': self.y[0] -= SIZE elif self.direction == 'down': self.y[0] += SIZE self.draw() def", "self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1, y1, x2, y2): if x1 >= x2", "Apple Game\") pygame.mixer.init() self.play_background_music() self.surface = pygame.display.set_mode((1000, 800)) self.snake1 = Snake(self.surface) # for", "self.snake2.y = [40] self.snake1.draw() self.snake2.draw() self.snake1.dead = False self.snake2.dead = False # todo", "= False if event.key == K_RETURN: pygame.mixer.music.unpause() pause = False if not pause:", "[920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.draw() self.snake2.draw() self.snake1.dead", "-= SIZE elif self.direction == 'down': self.y[0] += SIZE self.draw() def draw(self): for", "False: if event.key == K_a: self.snake2.move_left() if event.key == K_d: self.snake2.move_right() if event.key", "if event.key == K_UP: self.snake1.move_up() if event.key == K_DOWN: self.snake1.move_down() # for the", "self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip() # for the screen update # snake colliding with", "self.snake2.dead == False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead = True for i in", "SIZE: return True return False def collide_boundaries(self, x, y): if x > 1000", "# for initialize the two 
snakes' positions self.snake1.x = [920] self.snake1.y = [40]", "snake colliding with the wall boundaries # collides with boundaries. if self.snake1.dead ==", "bigger # snake colliding with the wall boundaries # collides with boundaries. if", "if snake2's head eats snake1 it becomes bigger if self.snake1.dead == False and", "self.y = random.randint(1, 19) * SIZE class Snake: def __init__(self, parent_screen): self.parent_screen =", "SIZE self.y = random.randint(1, 19) * SIZE class Snake: def __init__(self, parent_screen): self.parent_screen", "self.direction == 'down': self.y[0] += SIZE self.draw() def draw(self): for i in range(self.length):", "self.snake2 = Snake(self.surface) self.snake1.dead = False self.snake2.dead = False self.snake1.x = [920] self.snake1.y", "(200, 200, 200)) self.surface.blit(score, (850, 10)) def show_game_over(self): self.render_background() font = pygame.font.SysFont(\"arial\", 30)", "snake colliding with apple if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw()", "draw(self): self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip() def move(self): self.x = random.randint(1, 24) * SIZE", "self.x = [SIZE] # SIZE == 40 [SIZE] self.y = [SIZE] self.dead =", "event.key == K_ESCAPE: running = False if event.key == K_RETURN: pygame.mixer.music.unpause() pause =", "False self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40]", "random.randint(1, 19) * SIZE def draw(self): self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip() def move(self): self.x", "print(\"DEKU ALIVE\") if self.snake2.dead == False: self.snake2.walk() print(\"BAKUGOU ALIVE\") # self.snake2.walk() # TODO:", "recover\") def reset(self): self.snake1 = Snake(self.surface) self.snake2 = Snake(self.surface) self.snake1.dead = False self.snake2.dead", "self.apple2.x, self.apple2.y): 
self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw() # todo make these sankes over the", "run(self): running = True pause = False while running: for event in pygame.event.get():", "True pause = False while running: for event in pygame.event.get(): if event.type ==", "if self.snake1.dead == False: self.snake1.walk() print(\"DEKU ALIVE\") if self.snake2.dead == False: self.snake2.walk() print(\"BAKUGOU", "snake2 it becomes bigger if self.snake1.dead == False and self.snake2.dead == False: for", "= 'left' def move_right(self): self.direction = 'right' def move_up(self): self.direction = 'up' def", "for the yellow one. self.snake2 = Snake(self.surface) # for the blue one. #", "pictures. self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize the two snakes'", "return False def render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0)) def play(self): self.render_background()", "SIZE elif self.direction == 'up': self.y[0] -= SIZE elif self.direction == 'down': self.y[0]", "self.snake2.walk() # TODO: Make the apple become two self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip() #", "pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif sound_name == \"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo", "self.snake1.dead = False self.snake2.dead = False self.snake1.x = [920] self.snake1.y = [40] self.snake2.x", "SIZE elif self.direction == 'down': self.y[0] += SIZE self.draw() def draw(self): for i", "elif event.type == QUIT: running = False try: if not pause: self.play() except", "collide\") return 
False def render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0)) def play(self):", "SIZE == 40 [SIZE] self.y = [SIZE] self.dead = False def move_left(self): self.direction", "False def collide_boundaries(self, x, y): if x > 1000 or x < 0:", "i self.snake2.y[i] = -10000000000 - 1000 * i def display_score(self): font = pygame.font.SysFont('arial',", "for the first snake if self.snake1.dead == False: if event.key == K_LEFT: self.snake1.move_left()", "* import time import random SIZE = 40 BACKGROUND_COLOR = (110, 110, 5)", "Your score is {self.snake1.length + self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1, (200, 300)) # blit(source,", "parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down' self.length = 1 self.x = [SIZE]", "update # snake colliding with apple if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length()", "def draw(self): self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip() def move(self): self.x = random.randint(1, 24) *", "self.snake2.length}\", True, (200, 200, 200)) self.surface.blit(score, (850, 10)) def show_game_over(self): self.render_background() font =", "== True: # print(\"go expection\") if self.snake1.dead == True and self.snake2.dead == True:", "eats it becomes bigger # snake colliding with the wall boundaries # collides", "= [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.image =", "for the second snake if self.snake2.dead == False: if event.key == K_a: self.snake2.move_left()", "head eats snake2 it becomes bigger if self.snake1.dead == False and self.snake2.dead ==", "def move_down(self): self.direction = 'down' def walk(self): # update body for i in", "i in range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): 
self.play_sound(\"crash\") self.snake2.dead = True #", "self.snake1 = Snake(self.surface) self.snake2 = Snake(self.surface) self.snake1.dead = False self.snake2.dead = False self.snake1.x", "= pygame.font.SysFont(\"arial\", 30) line1 = font.render(f\"Game is over! Your score is {self.snake1.length +", "event.key == K_RETURN: pygame.mixer.music.unpause() pause = False if not pause: # for the", "running: for event in pygame.event.get(): if event.type == KEYDOWN: if event.key == K_ESCAPE:", "random.randint(1, 24) * SIZE self.y = random.randint(1, 19) * SIZE def draw(self): self.parent_screen.blit(self.image,", "self.snake2.dead == False: if event.key == K_a: self.snake2.move_left() if event.key == K_d: self.snake2.move_right()", "self.y[i]) pygame.display.update() def increase_length(self): self.length += 1 self.x.append(-1) self.y.append(-1) class Game: def __init__(self):", "two snake # if self.snake2.dead == True: # print(\"go expection\") if self.snake1.dead ==", "initialize the two snakes' positions self.snake1.x = [920] self.snake1.y = [40] self.snake2.x =", "x2, y2): if x1 >= x2 and x1 < x2 + SIZE: if", "snake colliding with itself if self.snake1.dead == False: for i in range(3, self.snake1.length):", "move_down(self): self.direction = 'down' def walk(self): # update body for i in range(self.length", "self.snake2.dead = False self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y", "== True: print(\"default true ?? 
\") raise \"Collision Occured\" if self.snake1.dead == False:", "snake if self.snake2.dead == False: if event.key == K_a: self.snake2.move_left() if event.key ==", "try: if not pause: self.play() except Exception as e: print(e) print(\"raise exception\") self.show_game_over()", "self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True # TODO if both's head eats it", "pygame.event.get(): if event.type == KEYDOWN: if event.key == K_ESCAPE: running = False if", "# blit(source, postion) line2 = font.render(\"To play again press Enter. To exit press", "event.key == K_DOWN: self.snake1.move_down() # for the second snake if self.snake2.dead == False:", "y2): if x1 >= x2 and x1 < x2 + SIZE: if y1", "= pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize the two snakes' positions self.snake1.x = [920] self.snake1.y", "self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down' self.length = 1 self.x = [SIZE] #", "+ SIZE: if y1 >= y2 and y1 < y2 + SIZE: return", "= pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0)) def play(self): self.render_background() # for the two snake", "self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip() def move(self): self.x = random.randint(1, 24) * SIZE self.y", "self.snake1.dead == False and self.snake2.dead == False: for i in range(self.snake1.length): if self.is_collision(self.snake2.x[0],", "if self.direction == 'left': self.x[0] -= SIZE elif self.direction == 'right': self.x[0] +=", "19) * SIZE class Snake: def __init__(self, parent_screen): self.parent_screen = parent_screen self.image =", "True, (200, 200, 200)) self.surface.blit(score, (850, 10)) def show_game_over(self): self.render_background() font = pygame.font.SysFont(\"arial\",", "import * import time import random SIZE = 40 BACKGROUND_COLOR = (110, 110,", "# for the yellow one. 
self.snake2 = Snake(self.surface) # for the blue one.", "Snake(self.surface) # for the yellow one. self.snake2 = Snake(self.surface) # for the blue", "== False: for i in range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead", "pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down' self.length = 1 self.x = [SIZE] # SIZE ==", "# for initialize the two snakes's pictures. self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert()", "reset(self): self.snake1 = Snake(self.surface) self.snake2 = Snake(self.surface) self.snake1.dead = False self.snake2.dead = False", "Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def", "if event.key == K_d: self.snake2.move_right() if event.key == K_w: self.snake2.move_up() if event.key ==", "self.play_sound(\"crash\") self.snake1.dead = True # TODO if both's head eats it becomes bigger", "self.apple1.draw() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self): # todo spend", "in range(self.snake2.length): self.snake2.x[i] = -10000000000 - 1000 * i self.snake2.y[i] = -10000000000 -", "# update head if self.direction == 'left': self.x[0] -= SIZE elif self.direction ==", "if not pause: self.play() except Exception as e: print(e) print(\"raise exception\") self.show_game_over() pause", "increase_length(self): self.length += 1 self.x.append(-1) self.y.append(-1) class Game: def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake", "24) * SIZE self.y = random.randint(1, 19) * SIZE 
class Snake: def __init__(self,", "ALIVE\") # self.snake2.walk() # TODO: Make the apple become two self.apple1.draw() self.apple2.draw() self.display_score()", "in range(self.length - 1, 0, -1): self.x[i] = self.x[i - 1] self.y[i] =", "self.direction = 'up' def move_down(self): self.direction = 'down' def walk(self): # update body", "self.y[0] -= SIZE elif self.direction == 'down': self.y[0] += SIZE self.draw() def draw(self):", "for i in range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i], self.y[i]) pygame.display.update() def increase_length(self): self.length", "if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake2's", "Snake(self.surface) self.snake2 = Snake(self.surface) self.snake1.dead = False self.snake2.dead = False self.snake1.x = [920]", "def play_background_music(self): # todo spend times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def play_sound(self,", "if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake1's", "= font.render(f\"Game is over! 
Your score is {self.snake1.length + self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1,", "-= SIZE elif self.direction == 'right': self.x[0] += SIZE elif self.direction == 'up':", "sound_name == \"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def reset(self):", "self.parent_screen = parent_screen self.x = random.randint(1, 24) * SIZE self.y = random.randint(1, 19)", "self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw() # todo", "self.render_background() font = pygame.font.SysFont(\"arial\", 30) line1 = font.render(f\"Game is over! Your score is", "self.parent_screen = parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down' self.length = 1 self.x", "if self.snake2.dead == True: # print(\"go expection\") if self.snake1.dead == True and self.snake2.dead", "self.snake1.move_left() if event.key == K_RIGHT: self.snake1.move_right() if event.key == K_UP: self.snake1.move_up() if event.key", "self.y[i])) print(self.x[i], self.y[i]) pygame.display.update() def increase_length(self): self.length += 1 self.x.append(-1) self.y.append(-1) class Game:", "= Snake(self.surface) self.snake2 = Snake(self.surface) self.snake1.dead = False self.snake2.dead = False self.snake1.x =", "800)) self.snake1 = Snake(self.surface) # for the yellow one. self.snake2 = Snake(self.surface) #", "x2 + SIZE: if y1 >= y2 and y1 < y2 + SIZE:", "True for i in range(self.snake2.length): self.snake2.x[i] = -10000000000 - 1000 * i self.snake2.y[i]", "def show_game_over(self): self.render_background() font = pygame.font.SysFont(\"arial\", 30) line1 = font.render(f\"Game is over! Your", "over! 
Your score is {self.snake1.length + self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1, (200, 300)) #", "== K_DOWN: self.snake1.move_down() # for the second snake if self.snake2.dead == False: if", "= False self.snake2.dead = False self.snake1.x = [920] self.snake1.y = [40] self.snake2.x =", "colliding with the wall boundaries # collides with boundaries. if self.snake1.dead == False:", "event.key == K_a: self.snake2.move_left() if event.key == K_d: self.snake2.move_right() if event.key == K_w:", "350)) pygame.mixer.music.pause() pygame.display.flip() def run(self): running = True pause = False while running:", "self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake1's head eats snake2 it becomes", "= self.y[i - 1] # update head if self.direction == 'left': self.x[0] -=", "line2 = font.render(\"To play again press Enter. To exit press Escape!\", True, WHITE_COLOR)", "False self.snake2.dead = False # todo for the two apples self.apple1 = Apple(self.surface)", "pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self): # todo spend times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0)", "1000 * i self.snake1.y[i] = -10000000000 - 1000 * i if self.snake2.dead ==", "pause: # for the first snake if self.snake1.dead == False: if event.key ==", "self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw()", "x1, y1, x2, y2): if x1 >= x2 and x1 < x2 +", "False: self.snake2.walk() print(\"BAKUGOU ALIVE\") # self.snake2.walk() # TODO: Make the apple become two", "self.snake1.y[i] = -10000000000 - 1000 * i if self.snake2.dead == False: if self.collide_boundaries(self.snake2.x[0],", "def __init__(self, parent_screen): self.image = 
pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen self.x = random.randint(1, 24)", "true ?? \") raise \"Collision Occured\" if self.snake1.dead == False: self.snake1.walk() print(\"DEKU ALIVE\")", "True # TODO if snake2's head eats snake1 it becomes bigger if self.snake1.dead", "it becomes bigger if self.snake1.dead == False and self.snake2.dead == False: for i", "self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i], self.y[i]) pygame.display.update() def increase_length(self): self.length += 1 self.x.append(-1) self.y.append(-1)", "(850, 10)) def show_game_over(self): self.render_background() font = pygame.font.SysFont(\"arial\", 30) line1 = font.render(f\"Game is", "if self.snake1.dead == False and self.snake2.dead == False: for i in range(self.snake1.length): if", "self.snake1.move_up() if event.key == K_DOWN: self.snake1.move_down() # for the second snake if self.snake2.dead", "range(self.length - 1, 0, -1): self.x[i] = self.x[i - 1] self.y[i] = self.y[i", "snakes' positions self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y =", "1000 * i def display_score(self): font = pygame.font.SysFont('arial', 30) score = font.render(f\"Score: {self.snake1.length", "if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x,", "becomes bigger # snake colliding with the wall boundaries # collides with boundaries.", "[920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert()", "y2 and y1 < y2 + SIZE: return True return False def collide_boundaries(self,", "== \"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # 
pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def reset(self): self.snake1", "if self.snake2.dead == False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead = True for i", "def collide_boundaries(self, x, y): if x > 1000 or x < 0: print(\"is", "if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead = True for i in range(self.snake1.length): self.snake1.x[i] =", "= [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image =", "30) line1 = font.render(f\"Game is over! Your score is {self.snake1.length + self.snake2.length}\", True,", "self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize the two snakes' positions", "= False while running: for event in pygame.event.get(): if event.type == KEYDOWN: if", "> 1000 or x < 0: print(\"is collide\") return True if y >", "for the screen update # snake colliding with apple if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x,", "= True for i in range(self.snake1.length): self.snake1.x[i] = -10000000000 - 1000 * i", "self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert()", "for the blue one. # for initialize the two snakes's pictures. 
self.snake1.image =", "K_s: self.snake2.move_down() elif event.type == QUIT: running = False try: if not pause:", "y2 + SIZE: return True return False def collide_boundaries(self, x, y): if x", "K_LEFT: self.snake1.move_left() if event.key == K_RIGHT: self.snake1.move_right() if event.key == K_UP: self.snake1.move_up() if", "if y > 800 or y < 0: print(\"is collide\") return True print(\"not", "self.apple2.draw() self.display_score() pygame.display.flip() # for the screen update # snake colliding with apple", "event.key == K_w: self.snake2.move_up() if event.key == K_s: self.snake2.move_down() elif event.type == QUIT:", "self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True if self.snake2.dead == False: for i", "one. # for initialize the two snakes's pictures. self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image =", "True if y > 800 or y < 0: print(\"is collide\") return True", "== 'up': self.y[0] -= SIZE elif self.direction == 'down': self.y[0] += SIZE self.draw()", "positions self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40]", "pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def reset(self): self.snake1 = Snake(self.surface) self.snake2 = Snake(self.surface)", "== False: self.snake2.walk() print(\"BAKUGOU ALIVE\") # self.snake2.walk() # TODO: Make the apple become", "self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move()", "self.snake2.x[i] = -10000000000 - 1000 * i self.snake2.y[i] = -10000000000 - 1000 *", "False and self.snake2.dead == False: for i in range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], 
self.snake2.x[i],", "self.snake2.dead == False: for i in range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\")", "sound_name == \"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif sound_name == \"ding\": sound =", "def is_collision(self, x1, y1, x2, y2): if x1 >= x2 and x1 <", "self.snake2.move_left() if event.key == K_d: self.snake2.move_right() if event.key == K_w: self.snake2.move_up() if event.key", "Snake And Apple Game\") pygame.mixer.init() self.play_background_music() self.surface = pygame.display.set_mode((1000, 800)) self.snake1 = Snake(self.surface)", "= (110, 110, 5) WHITE_COLOR = (255, 255, 255) class Apple: def __init__(self,", "False: for i in range(3, self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead", "= [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.draw() self.snake2.draw() self.snake1.dead = False", "one. self.snake2 = Snake(self.surface) # for the blue one. # for initialize the", "+= SIZE elif self.direction == 'up': self.y[0] -= SIZE elif self.direction == 'down':", "self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True if self.snake2.dead == False: for", "self.play_sound(\"crash\") self.snake1.dead = True for i in range(self.snake1.length): self.snake1.x[i] = -10000000000 - 1000", "== K_a: self.snake2.move_left() if event.key == K_d: self.snake2.move_right() if event.key == K_w: self.snake2.move_up()", "== K_w: self.snake2.move_up() if event.key == K_s: self.snake2.move_down() elif event.type == QUIT: running", "= random.randint(1, 24) * SIZE self.y = random.randint(1, 19) * SIZE class Snake:", "snakes's pictures. 
self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize the two", "True, WHITE_COLOR) self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause() pygame.display.flip() def run(self): running = True pause", "self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move()", "[SIZE] self.y = [SIZE] self.dead = False def move_left(self): self.direction = 'left' def", "+ self.snake2.length}\", True, (200, 200, 200)) self.surface.blit(score, (850, 10)) def show_game_over(self): self.render_background() font", "the second snake if self.snake2.dead == False: if event.key == K_a: self.snake2.move_left() if", "self.snake2.dead == True: print(\"default true ?? \") raise \"Collision Occured\" if self.snake1.dead ==", "= [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.draw() self.snake2.draw()", "self.x[0] -= SIZE elif self.direction == 'right': self.x[0] += SIZE elif self.direction ==", "= True if self.snake2.dead == False: for i in range(3, self.snake2.length): if self.is_collision(self.snake2.x[0],", "self.x[i - 1] self.y[i] = self.y[i - 1] # update head if self.direction", "play again press Enter. 
To exit press Escape!\", True, WHITE_COLOR) self.surface.blit(line2, (200, 350))", "# for the two snake # if self.snake2.dead == True: # print(\"go expection\")", "print(e) print(\"raise exception\") self.show_game_over() pause = True self.reset() # self.snake.walk() time.sleep(.1) if __name__", "head eats it becomes bigger # snake colliding with the wall boundaries #", "False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead = True for i in range(self.snake2.length): self.snake2.x[i]", "False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead = True for i in range(self.snake1.length): self.snake1.x[i]", "y1 < y2 + SIZE: return True return False def collide_boundaries(self, x, y):", "collides with boundaries. if self.snake1.dead == False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead =", "move_right(self): self.direction = 'right' def move_up(self): self.direction = 'up' def move_down(self): self.direction =", "True # TODO if both's head eats it becomes bigger # snake colliding", "event.key == K_UP: self.snake1.move_up() if event.key == K_DOWN: self.snake1.move_down() # for the second", "KEYDOWN: if event.key == K_ESCAPE: running = False if event.key == K_RETURN: pygame.mixer.music.unpause()", "self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw() # todo make these sankes over the window", "Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1, y1, x2, y2): if x1 >=", "i if self.snake2.dead == False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead = True for", "walk(self): # update body for i in range(self.length - 1, 0, -1): self.x[i]", "blit(source, postion) line2 = font.render(\"To play again press 
Enter. To exit press Escape!\",", "self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move()", "import time import random SIZE = 40 BACKGROUND_COLOR = (110, 110, 5) WHITE_COLOR", "if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True # TODO if both's", "* i def display_score(self): font = pygame.font.SysFont('arial', 30) score = font.render(f\"Score: {self.snake1.length +", "Escape!\", True, WHITE_COLOR) self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause() pygame.display.flip() def run(self): running = True", "self.direction == 'right': self.x[0] += SIZE elif self.direction == 'up': self.y[0] -= SIZE", "* i if self.snake2.dead == False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead = True", "1] # update head if self.direction == 'left': self.x[0] -= SIZE elif self.direction", "SIZE class Snake: def __init__(self, parent_screen): self.parent_screen = parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction", "def play_sound(self, sound_name): if sound_name == \"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif sound_name", "# update body for i in range(self.length - 1, 0, -1): self.x[i] =", "for initialize the two snakes' positions self.snake1.x = [920] self.snake1.y = [40] self.snake2.x", "= False # todo for the two apples self.apple1 = Apple(self.surface) self.apple1.image =", "# TODO if snake2's head eats snake1 it becomes bigger if self.snake1.dead ==", "print(self.x[i], self.y[i]) pygame.display.update() def increase_length(self): self.length += 1 
self.x.append(-1) self.y.append(-1) class Game: def", "exception\") self.show_game_over() pause = True self.reset() # self.snake.walk() time.sleep(.1) if __name__ == \"__main__\":", "= Snake(self.surface) self.snake1.dead = False self.snake2.dead = False self.snake1.x = [920] self.snake1.y =", "self.surface.blit(line1, (200, 300)) # blit(source, postion) line2 = font.render(\"To play again press Enter.", "self.snake2.dead = True # TODO if snake1's head eats snake2 it becomes bigger", "- 1000 * i self.snake2.y[i] = -10000000000 - 1000 * i def display_score(self):", "font = pygame.font.SysFont('arial', 30) score = font.render(f\"Score: {self.snake1.length + self.snake2.length}\", True, (200, 200,", "event.key == K_s: self.snake2.move_down() elif event.type == QUIT: running = False try: if", "with apple if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake2.x[0],", "SIZE self.draw() def draw(self): for i in range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i], self.y[i])", "self.snake1.draw() self.snake2.draw() self.snake1.dead = False self.snake2.dead = False # todo for the two", "if x > 1000 or x < 0: print(\"is collide\") return True if", "self.direction == 'up': self.y[0] -= SIZE elif self.direction == 'down': self.y[0] += SIZE", "'right': self.x[0] += SIZE elif self.direction == 'up': self.y[0] -= SIZE elif self.direction", "self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length()", "print(\"is collide\") return True print(\"not collide\") return False def render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\")", 
"self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize the two snakes' positions self.snake1.x = [920]", "pygame.mixer.music.play(-1, 0) def play_sound(self, sound_name): if sound_name == \"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound)", "0: print(\"is collide\") return True print(\"not collide\") return False def render_background(self): bg =", "self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw()", "window # snake colliding with itself if self.snake1.dead == False: for i in", "True for i in range(self.snake1.length): self.snake1.x[i] = -10000000000 - 1000 * i self.snake1.y[i]", "pygame.mixer.music.pause() pygame.display.flip() def run(self): running = True pause = False while running: for", "press Escape!\", True, WHITE_COLOR) self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause() pygame.display.flip() def run(self): running =", "def walk(self): # update body for i in range(self.length - 1, 0, -1):", "if self.snake2.dead == False: for i in range(3, self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i],", "self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\")", "pygame.mixer.music.unpause() pause = False if not pause: # for the first snake if", "event.key == K_RIGHT: self.snake1.move_right() if event.key == K_UP: self.snake1.move_up() if event.key == K_DOWN:", "elif self.direction == 'right': self.x[0] += SIZE elif self.direction == 'up': self.y[0] -=", "the two 
snakes's pictures. self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() # for initialize", "for i in range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True", "pause = True self.reset() # self.snake.walk() time.sleep(.1) if __name__ == \"__main__\": game =", "for initialize the two snakes's pictures. self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() #", "# print(\"go expection\") if self.snake1.dead == True and self.snake2.dead == True: print(\"default true", "y > 800 or y < 0: print(\"is collide\") return True print(\"not collide\")", "these sankes over the window # snake colliding with itself if self.snake1.dead ==", "# for the screen update # snake colliding with apple if self.is_collision(self.snake1.x[0], self.snake1.y[0],", "self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw() # todo make these sankes over the window #", "the wall boundaries # collides with boundaries. if self.snake1.dead == False: if self.collide_boundaries(self.snake1.x[0],", "self.surface.blit(score, (850, 10)) def show_game_over(self): self.render_background() font = pygame.font.SysFont(\"arial\", 30) line1 = font.render(f\"Game", "pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0)) def play(self): self.render_background() # for the two snake #", "pygame.font.SysFont(\"arial\", 30) line1 = font.render(f\"Game is over! 
Your score is {self.snake1.length + self.snake2.length}\",", "def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake And Apple Game\") pygame.mixer.init() self.play_background_music() self.surface = pygame.display.set_mode((1000,", "with itself if self.snake1.dead == False: for i in range(3, self.snake1.length): if self.is_collision(self.snake1.x[0],", "True # TODO if snake1's head eats snake2 it becomes bigger if self.snake1.dead", "# TODO if both's head eats it becomes bigger # snake colliding with", "self.snake2.y[i] = -10000000000 - 1000 * i def display_score(self): font = pygame.font.SysFont('arial', 30)", "self.snake2.move_down() elif event.type == QUIT: running = False try: if not pause: self.play()", "800 or y < 0: print(\"is collide\") return True print(\"not collide\") return False", "== K_ESCAPE: running = False if event.key == K_RETURN: pygame.mixer.music.unpause() pause = False", "event in pygame.event.get(): if event.type == KEYDOWN: if event.key == K_ESCAPE: running =", "for i in range(3, self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead =", "event.key == K_LEFT: self.snake1.move_left() if event.key == K_RIGHT: self.snake1.move_right() if event.key == K_UP:", "colliding with itself if self.snake1.dead == False: for i in range(3, self.snake1.length): if", "True return False def collide_boundaries(self, x, y): if x > 1000 or x", "= 'right' def move_up(self): self.direction = 'up' def move_down(self): self.direction = 'down' def", "if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x,", "[40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = 
pygame.image.load(\"resources/bakugou.jpg\").convert()", "def draw(self): for i in range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i], self.y[i]) pygame.display.update() def", "= True # TODO if snake2's head eats snake1 it becomes bigger if", "True, WHITE_COLOR) self.surface.blit(line1, (200, 300)) # blit(source, postion) line2 = font.render(\"To play again", "self.direction = 'down' self.length = 1 self.x = [SIZE] # SIZE == 40", "SIZE = 40 BACKGROUND_COLOR = (110, 110, 5) WHITE_COLOR = (255, 255, 255)", "eats snake2 it becomes bigger if self.snake1.dead == False and self.snake2.dead == False:", "== \"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif sound_name == \"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\")", "Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self): # todo spend times read docs", "= [40] self.snake2.y = [40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 =", "running = False try: if not pause: self.play() except Exception as e: print(e)", "self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.draw() self.snake2.draw() self.snake1.dead =", "running = True pause = False while running: for event in pygame.event.get(): if", "= 'down' self.length = 1 self.x = [SIZE] # SIZE == 40 [SIZE]", "x1 < x2 + SIZE: if y1 >= y2 and y1 < y2", "self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image", "- 1000 * i self.snake1.y[i] = -10000000000 - 1000 * i if self.snake2.dead", "= -10000000000 - 1000 * i def display_score(self): font = 
pygame.font.SysFont('arial', 30) score", "if self.snake1.dead == False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead = True for i", "== False: if event.key == K_LEFT: self.snake1.move_left() if event.key == K_RIGHT: self.snake1.move_right() if", "WHITE_COLOR) self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause() pygame.display.flip() def run(self): running = True pause =", "i in range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True #", "if event.key == K_s: self.snake2.move_down() elif event.type == QUIT: running = False try:", "first snake if self.snake1.dead == False: if event.key == K_LEFT: self.snake1.move_left() if event.key", "apple become two self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip() # for the screen update #", "bigger if self.snake1.dead == False and self.snake2.dead == False: for i in range(self.snake2.length):", "= Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw()", "body for i in range(self.length - 1, 0, -1): self.x[i] = self.x[i -", "* SIZE def draw(self): self.parent_screen.blit(self.image, (self.x, self.y)) pygame.display.flip() def move(self): self.x = random.randint(1,", "bigger if self.snake1.dead == False and self.snake2.dead == False: for i in range(self.snake1.length):", "def move_up(self): self.direction = 'up' def move_down(self): self.direction = 'down' def walk(self): #", "self.x[0] += SIZE elif self.direction == 'up': self.y[0] -= SIZE elif self.direction ==", "return True print(\"not collide\") return False def render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\") 
self.surface.blit(bg, (0,", "self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake1's head eats snake2 it", "return False def collide_boundaries(self, x, y): if x > 1000 or x <", "pause = False while running: for event in pygame.event.get(): if event.type == KEYDOWN:", "self.y.append(-1) class Game: def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake And Apple Game\") pygame.mixer.init() self.play_background_music()", "(110, 110, 5) WHITE_COLOR = (255, 255, 255) class Apple: def __init__(self, parent_screen):", "self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake1's head eats snake2", "for event in pygame.event.get(): if event.type == KEYDOWN: if event.key == K_ESCAPE: running", "+ SIZE: return True return False def collide_boundaries(self, x, y): if x >", "To exit press Escape!\", True, WHITE_COLOR) self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause() pygame.display.flip() def run(self):", "if self.snake1.dead == False: for i in range(3, self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i],", "self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True if self.snake2.dead == False: for i in range(3,", "random SIZE = 40 BACKGROUND_COLOR = (110, 110, 5) WHITE_COLOR = (255, 255,", "K_a: self.snake2.move_left() if event.key == K_d: self.snake2.move_right() if event.key == K_w: self.snake2.move_up() if", "[40] self.snake1.draw() self.snake2.draw() self.snake1.dead = False self.snake2.dead = False # todo for the", "self.snake1.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw() if", "* i self.snake1.y[i] = -10000000000 - 1000 * i if self.snake2.dead == False:", "self.snake1.image = 
pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2", "= False try: if not pause: self.play() except Exception as e: print(e) print(\"raise", "for i in range(self.length - 1, 0, -1): self.x[i] = self.x[i - 1]", "display_score(self): font = pygame.font.SysFont('arial', 30) score = font.render(f\"Score: {self.snake1.length + self.snake2.length}\", True, (200,", "# SIZE == 40 [SIZE] self.y = [SIZE] self.dead = False def move_left(self):", "self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake2's head eats", "< 0: print(\"is collide\") return True if y > 800 or y <", "self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead = True for i in range(self.snake1.length): self.snake1.x[i] = -10000000000", "# self.snake2.walk() # TODO: Make the apple become two self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip()", "yellow one. self.snake2 = Snake(self.surface) # for the blue one. # for initialize", "= font.render(\"To play again press Enter. 
To exit press Escape!\", True, WHITE_COLOR) self.surface.blit(line2,", "the first snake if self.snake1.dead == False: if event.key == K_LEFT: self.snake1.move_left() if", "<filename>Two_snake_version.py import pygame from pygame.locals import * import time import random SIZE =", "draw(self): for i in range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i], self.y[i]) pygame.display.update() def increase_length(self):", "and y1 < y2 + SIZE: return True return False def collide_boundaries(self, x,", "self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw() # todo make these sankes over", "snake if self.snake1.dead == False: if event.key == K_LEFT: self.snake1.move_left() if event.key ==", "- 1] # update head if self.direction == 'left': self.x[0] -= SIZE elif", "1000 * i if self.snake2.dead == False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead =", "i in range(self.snake1.length): self.snake1.x[i] = -10000000000 - 1000 * i self.snake1.y[i] = -10000000000", "== K_UP: self.snake1.move_up() if event.key == K_DOWN: self.snake1.move_down() # for the second snake", "# pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def reset(self): self.snake1 = Snake(self.surface) self.snake2 = Snake(self.surface) self.snake1.dead", "random.randint(1, 19) * SIZE class Snake: def __init__(self, parent_screen): self.parent_screen = parent_screen self.image", "def increase_length(self): self.length += 1 self.x.append(-1) self.y.append(-1) class Game: def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics", "False: for i in range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead =", "'left' def move_right(self): self.direction = 'right' 
def move_up(self): self.direction = 'up' def move_down(self):", "pygame.display.set_caption(\"Codebasics Snake And Apple Game\") pygame.mixer.init() self.play_background_music() self.surface = pygame.display.set_mode((1000, 800)) self.snake1 =", "event.type == QUIT: running = False try: if not pause: self.play() except Exception", "snake2's head eats snake1 it becomes bigger if self.snake1.dead == False and self.snake2.dead", "\") raise \"Collision Occured\" if self.snake1.dead == False: self.snake1.walk() print(\"DEKU ALIVE\") if self.snake2.dead", "[40] self.snake2.y = [40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface)", "def play(self): self.render_background() # for the two snake # if self.snake2.dead == True:", "i in range(3, self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True", "both's head eats it becomes bigger # snake colliding with the wall boundaries", "K_UP: self.snake1.move_up() if event.key == K_DOWN: self.snake1.move_down() # for the second snake if", "print(\"raise exception\") self.show_game_over() pause = True self.reset() # self.snake.walk() time.sleep(.1) if __name__ ==", "+= SIZE self.draw() def draw(self): for i in range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i],", "= parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down' self.length = 1 self.x =", "= pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self): #", "pygame.locals import * import time import random SIZE = 40 BACKGROUND_COLOR = (110,", "K_RIGHT: 
self.snake1.move_right() if event.key == K_UP: self.snake1.move_up() if event.key == K_DOWN: self.snake1.move_down() #", "print(\"default true ?? \") raise \"Collision Occured\" if self.snake1.dead == False: self.snake1.walk() print(\"DEKU", "-10000000000 - 1000 * i self.snake2.y[i] = -10000000000 - 1000 * i def", "and self.snake2.dead == True: print(\"default true ?? \") raise \"Collision Occured\" if self.snake1.dead", "times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def play_sound(self, sound_name): if sound_name == \"crash\":", "'up' def move_down(self): self.direction = 'down' def walk(self): # update body for i", "pygame.mixer.Sound.play(sound) elif sound_name == \"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\")", "self.display_score() pygame.display.flip() # for the screen update # snake colliding with apple if", "(self.x, self.y)) pygame.display.flip() def move(self): self.x = random.randint(1, 24) * SIZE self.y =", "self.snake2.move_right() if event.key == K_w: self.snake2.move_up() if event.key == K_s: self.snake2.move_down() elif event.type", "return True if y > 800 or y < 0: print(\"is collide\") return", "apple if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0],", "True print(\"not collide\") return False def render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0))", "(0, 0)) def play(self): self.render_background() # for the two snake # if self.snake2.dead", "self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.image", "press 
Enter. To exit press Escape!\", True, WHITE_COLOR) self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause() pygame.display.flip()", "pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self): # todo", "self.snake1.dead = True # TODO if both's head eats it becomes bigger #", "self.length = 1 self.x = [SIZE] # SIZE == 40 [SIZE] self.y =", "{self.snake1.length + self.snake2.length}\", True, (200, 200, 200)) self.surface.blit(score, (850, 10)) def show_game_over(self): self.render_background()", "5) WHITE_COLOR = (255, 255, 255) class Apple: def __init__(self, parent_screen): self.image =", "not pause: # for the first snake if self.snake1.dead == False: if event.key", "self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw() if self.is_collision(self.snake2.x[0],", "raise \"Collision Occured\" if self.snake1.dead == False: self.snake1.walk() print(\"DEKU ALIVE\") if self.snake2.dead ==", "self.snake1 = Snake(self.surface) # for the yellow one. self.snake2 = Snake(self.surface) # for", "elif self.direction == 'up': self.y[0] -= SIZE elif self.direction == 'down': self.y[0] +=", "expection\") if self.snake1.dead == True and self.snake2.dead == True: print(\"default true ?? 
\")", "self.snake2.y = [40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface) self.apple1.image", "for the two snake # if self.snake2.dead == True: # print(\"go expection\") if", "move_up(self): self.direction = 'up' def move_down(self): self.direction = 'down' def walk(self): # update", "show_game_over(self): self.render_background() font = pygame.font.SysFont(\"arial\", 30) line1 = font.render(f\"Game is over! Your score", "= pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface) self.apple2.image =", "pause: self.play() except Exception as e: print(e) print(\"raise exception\") self.show_game_over() pause = True", "= 'up' def move_down(self): self.direction = 'down' def walk(self): # update body for", "from pygame.locals import * import time import random SIZE = 40 BACKGROUND_COLOR =", "if event.key == K_a: self.snake2.move_left() if event.key == K_d: self.snake2.move_right() if event.key ==", "in range(3, self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True #", "self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1, (200, 300)) # blit(source, postion) line2 = font.render(\"To play", "== 40 [SIZE] self.y = [SIZE] self.dead = False def move_left(self): self.direction =", "pygame.display.set_mode((1000, 800)) self.snake1 = Snake(self.surface) # for the yellow one. 
self.snake2 = Snake(self.surface)", "bg = pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0)) def play(self): self.render_background() # for the two", "self.snake1.move_right() if event.key == K_UP: self.snake1.move_up() if event.key == K_DOWN: self.snake1.move_down() # for", "def move_left(self): self.direction = 'left' def move_right(self): self.direction = 'right' def move_up(self): self.direction", "event.type == KEYDOWN: if event.key == K_ESCAPE: running = False if event.key ==", "self.snake2.dead == False: self.snake2.walk() print(\"BAKUGOU ALIVE\") # self.snake2.walk() # TODO: Make the apple", "= False self.snake2.dead = False # todo for the two apples self.apple1 =", "Game: def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake And Apple Game\") pygame.mixer.init() self.play_background_music() self.surface =", "score is {self.snake1.length + self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1, (200, 300)) # blit(source, postion)", "if self.snake1.dead == False: if event.key == K_LEFT: self.snake1.move_left() if event.key == K_RIGHT:", "i in range(self.snake2.length): self.snake2.x[i] = -10000000000 - 1000 * i self.snake2.y[i] = -10000000000", "or x < 0: print(\"is collide\") return True if y > 800 or", "-10000000000 - 1000 * i if self.snake2.dead == False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\")", "BACKGROUND_COLOR = (110, 110, 5) WHITE_COLOR = (255, 255, 255) class Apple: def", "0)) def play(self): self.render_background() # for the two snake # if self.snake2.dead ==", "= pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def reset(self): self.snake1 = Snake(self.surface) self.snake2", "self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead = True for i in 
range(self.snake2.length): self.snake2.x[i] = -10000000000 -", "= True self.reset() # self.snake.walk() time.sleep(.1) if __name__ == \"__main__\": game = Game()", "self.snake1.dead == True and self.snake2.dead == True: print(\"default true ?? \") raise \"Collision", "self.y = [SIZE] self.dead = False def move_left(self): self.direction = 'left' def move_right(self):", "score = font.render(f\"Score: {self.snake1.length + self.snake2.length}\", True, (200, 200, 200)) self.surface.blit(score, (850, 10))", "move_left(self): self.direction = 'left' def move_right(self): self.direction = 'right' def move_up(self): self.direction =", "self.snake2.dead == False: for i in range(3, self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]):", "[SIZE] self.dead = False def move_left(self): self.direction = 'left' def move_right(self): self.direction =", "= random.randint(1, 19) * SIZE class Snake: def __init__(self, parent_screen): self.parent_screen = parent_screen", "= self.x[i - 1] self.y[i] = self.y[i - 1] # update head if", "as e: print(e) print(\"raise exception\") self.show_game_over() pause = True self.reset() # self.snake.walk() time.sleep(.1)", "render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0)) def play(self): self.render_background() # for the", "SIZE: if y1 >= y2 and y1 < y2 + SIZE: return True", "if event.key == K_RETURN: pygame.mixer.music.unpause() pause = False if not pause: # for", "make these sankes over the window # snake colliding with itself if self.snake1.dead", "0, -1): self.x[i] = self.x[i - 1] self.y[i] = self.y[i - 1] #", "if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True if self.snake2.dead == False:", "x, y): if x > 1000 or x < 0: print(\"is collide\") return", "with the wall boundaries # collides with boundaries. 
if self.snake1.dead == False: if", "def reset(self): self.snake1 = Snake(self.surface) self.snake2 = Snake(self.surface) self.snake1.dead = False self.snake2.dead =", "= Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1, y1, x2, y2): if x1", "or y < 0: print(\"is collide\") return True print(\"not collide\") return False def", "255, 255) class Apple: def __init__(self, parent_screen): self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen", "1 self.x = [SIZE] # SIZE == 40 [SIZE] self.y = [SIZE] self.dead", "pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface)", "self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y):", "= (255, 255, 255) class Apple: def __init__(self, parent_screen): self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen", "is {self.snake1.length + self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1, (200, 300)) # blit(source, postion) line2", "if x1 >= x2 and x1 < x2 + SIZE: if y1 >=", "Snake: def __init__(self, parent_screen): self.parent_screen = parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down'", "# collides with boundaries. 
if self.snake1.dead == False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead", "= font.render(f\"Score: {self.snake1.length + self.snake2.length}\", True, (200, 200, 200)) self.surface.blit(score, (850, 10)) def", "the two snakes' positions self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40]", "self.snake1.dead == False: for i in range(3, self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]):", "-10000000000 - 1000 * i self.snake1.y[i] = -10000000000 - 1000 * i if", "= pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen self.x = random.randint(1, 24) * SIZE self.y =", "1000 or x < 0: print(\"is collide\") return True if y > 800", "colliding with apple if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw() if", "def __init__(self, parent_screen): self.parent_screen = parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down' self.length", "parent_screen): self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen self.x = random.randint(1, 24) * SIZE", "self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self):", "range(3, self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True if self.snake2.dead", "sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif 
sound_name == \"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) #", "wall boundaries # collides with boundaries. if self.snake1.dead == False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]):", "self.snake1.walk() print(\"DEKU ALIVE\") if self.snake2.dead == False: self.snake2.walk() print(\"BAKUGOU ALIVE\") # self.snake2.walk() #", "in range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True # TODO", "= [40] self.snake1.draw() self.snake2.draw() self.snake1.dead = False self.snake2.dead = False # todo for", "Game\") pygame.mixer.init() self.play_background_music() self.surface = pygame.display.set_mode((1000, 800)) self.snake1 = Snake(self.surface) # for the", "elif sound_name == \"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def", "False: if event.key == K_LEFT: self.snake1.move_left() if event.key == K_RIGHT: self.snake1.move_right() if event.key", "# for the second snake if self.snake2.dead == False: if event.key == K_a:", "i def display_score(self): font = pygame.font.SysFont('arial', 30) score = font.render(f\"Score: {self.snake1.length + self.snake2.length}\",", "self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length()", "self.snake1.dead == False: if event.key == K_LEFT: self.snake1.move_left() if event.key == K_RIGHT: self.snake1.move_right()", "screen update # snake colliding with apple if self.is_collision(self.snake1.x[0], self.snake1.y[0], 
self.apple1.x, self.apple1.y): self.play_sound(\"ding\")", "def run(self): running = True pause = False while running: for event in", "* i self.snake2.y[i] = -10000000000 - 1000 * i def display_score(self): font =", "'down' self.length = 1 self.x = [SIZE] # SIZE == 40 [SIZE] self.y", "self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self): # todo spend times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3')", "the screen update # snake colliding with apple if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple1.x, self.apple1.y):", "== K_RIGHT: self.snake1.move_right() if event.key == K_UP: self.snake1.move_up() if event.key == K_DOWN: self.snake1.move_down()", "0: print(\"is collide\") return True if y > 800 or y < 0:", "True self.reset() # self.snake.walk() time.sleep(.1) if __name__ == \"__main__\": game = Game() game.run()", "self.apple1.move() self.apple1.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0],", "range(self.snake2.length): self.snake2.x[i] = -10000000000 - 1000 * i self.snake2.y[i] = -10000000000 - 1000", "snake1's head eats snake2 it becomes bigger if self.snake1.dead == False and self.snake2.dead", "for i in range(self.snake2.length): self.snake2.x[i] = -10000000000 - 1000 * i self.snake2.y[i] =", "for i in range(self.snake1.length): self.snake1.x[i] = -10000000000 - 1000 * i self.snake1.y[i] =", "x < 0: print(\"is collide\") return True if y > 800 or y", "self.snake2.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw() if", "200, 200)) self.surface.blit(score, (850, 10)) def 
show_game_over(self): self.render_background() font = pygame.font.SysFont(\"arial\", 30) line1", "== K_LEFT: self.snake1.move_left() if event.key == K_RIGHT: self.snake1.move_right() if event.key == K_UP: self.snake1.move_up()", "self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length()", "self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def", "play_sound(self, sound_name): if sound_name == \"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif sound_name ==", "- 1, 0, -1): self.x[i] = self.x[i - 1] self.y[i] = self.y[i -", "move(self): self.x = random.randint(1, 24) * SIZE self.y = random.randint(1, 19) * SIZE", "pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1, y1, x2, y2):", "== False: self.snake1.walk() print(\"DEKU ALIVE\") if self.snake2.dead == False: self.snake2.walk() print(\"BAKUGOU ALIVE\") #", "i in range(3, self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True", "if sound_name == \"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif sound_name == \"ding\": sound", "self.y[i - 1] # update head if self.direction == 'left': self.x[0] -= SIZE", "collide_boundaries(self, x, y): if x > 1000 or x < 0: print(\"is 
collide\")", "update body for i in range(self.length - 1, 0, -1): self.x[i] = self.x[i", "self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y = [40] self.snake1.draw()", ">= y2 and y1 < y2 + SIZE: return True return False def", "* SIZE self.y = random.randint(1, 19) * SIZE def draw(self): self.parent_screen.blit(self.image, (self.x, self.y))", "sound_name): if sound_name == \"crash\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif sound_name == \"ding\":", "if self.snake2.dead == False: self.snake2.walk() print(\"BAKUGOU ALIVE\") # self.snake2.walk() # TODO: Make the", "if event.type == KEYDOWN: if event.key == K_ESCAPE: running = False if event.key", "True: # print(\"go expection\") if self.snake1.dead == True and self.snake2.dead == True: print(\"default", "self.y[i] = self.y[i - 1] # update head if self.direction == 'left': self.x[0]", "two self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip() # for the screen update # snake colliding", "False def move_left(self): self.direction = 'left' def move_right(self): self.direction = 'right' def move_up(self):", "?? 
\") raise \"Collision Occured\" if self.snake1.dead == False: self.snake1.walk() print(\"DEKU ALIVE\") if", "if snake1's head eats snake2 it becomes bigger if self.snake1.dead == False and", "# if self.snake2.dead == True: # print(\"go expection\") if self.snake1.dead == True and", "= [40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface) self.apple1.image =", "Make the apple become two self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip() # for the screen", "self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw() # todo make these sankes", "== False: for i in range(3, self.snake2.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\")", "x1 >= x2 and x1 < x2 + SIZE: if y1 >= y2", "y < 0: print(\"is collide\") return True print(\"not collide\") return False def render_background(self):", "if self.snake1.dead == False and self.snake2.dead == False: for i in range(self.snake2.length): if", "and x1 < x2 + SIZE: if y1 >= y2 and y1 <", "if self.snake1.dead == True and self.snake2.dead == True: print(\"default true ?? 
\") raise", "1000 * i self.snake2.y[i] = -10000000000 - 1000 * i def display_score(self): font", "-1): self.x[i] = self.x[i - 1] self.y[i] = self.y[i - 1] # update", "== False and self.snake2.dead == False: for i in range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0],", "self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True # TODO if both's head eats it becomes", "{self.snake1.length + self.snake2.length}\", True, WHITE_COLOR) self.surface.blit(line1, (200, 300)) # blit(source, postion) line2 =", "import random SIZE = 40 BACKGROUND_COLOR = (110, 110, 5) WHITE_COLOR = (255,", "self.surface = pygame.display.set_mode((1000, 800)) self.snake1 = Snake(self.surface) # for the yellow one. self.snake2", "self.snake2.increase_length() self.apple2.move() self.apple2.draw() # todo make these sankes over the window # snake", "-10000000000 - 1000 * i def display_score(self): font = pygame.font.SysFont('arial', 30) score =", "# TODO if snake1's head eats snake2 it becomes bigger if self.snake1.dead ==", "- 1000 * i if self.snake2.dead == False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead", "Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1,", "1] self.y[i] = self.y[i - 1] # update head if self.direction == 'left':", "= Snake(self.surface) # for the yellow one. 
self.snake2 = Snake(self.surface) # for the", "if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake2.increase_length() self.apple2.move() self.apple2.draw() # todo make these", "font.render(f\"Score: {self.snake1.length + self.snake2.length}\", True, (200, 200, 200)) self.surface.blit(score, (850, 10)) def show_game_over(self):", "i in range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i], self.y[i]) pygame.display.update() def increase_length(self): self.length +=", "over the window # snake colliding with itself if self.snake1.dead == False: for", "= parent_screen self.x = random.randint(1, 24) * SIZE self.y = random.randint(1, 19) *", "in range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i], self.y[i]) pygame.display.update() def increase_length(self): self.length += 1", "self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self): # todo spend times", "'down' def walk(self): # update body for i in range(self.length - 1, 0,", "= Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self,", "Apple: def __init__(self, parent_screen): self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen self.x = random.randint(1,", "10)) def show_game_over(self): self.render_background() font = pygame.font.SysFont(\"arial\", 30) line1 = font.render(f\"Game is over!", "self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True if self.snake2.dead == False: for i in", "* SIZE self.y = random.randint(1, 19) * SIZE class Snake: def __init__(self, 
parent_screen):", "print(\"todo recover\") def reset(self): self.snake1 = Snake(self.surface) self.snake2 = Snake(self.surface) self.snake1.dead = False", "print(\"is collide\") return True if y > 800 or y < 0: print(\"is", "= 'down' def walk(self): # update body for i in range(self.length - 1,", "> 800 or y < 0: print(\"is collide\") return True print(\"not collide\") return", "TODO if snake2's head eats snake1 it becomes bigger if self.snake1.dead == False", "pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def reset(self): self.snake1 = Snake(self.surface) self.snake2 = Snake(self.surface) self.snake1.dead =", "blue one. # for initialize the two snakes's pictures. self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image", "Enter. To exit press Escape!\", True, WHITE_COLOR) self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause() pygame.display.flip() def", "if y1 >= y2 and y1 < y2 + SIZE: return True return", "__init__(self, parent_screen): self.parent_screen = parent_screen self.image = pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down' self.length =", "sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\")) print(\"todo recover\") def reset(self): self.snake1 = Snake(self.surface)", "TODO if both's head eats it becomes bigger # snake colliding with the", "self.snake1.dead = True if self.snake2.dead == False: for i in range(3, self.snake2.length): if", "self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake2's head", "x > 1000 or x < 0: print(\"is collide\") return True if y", "200)) self.surface.blit(score, (850, 10)) def show_game_over(self): self.render_background() font = pygame.font.SysFont(\"arial\", 30) 
line1 =", "= -10000000000 - 1000 * i self.snake2.y[i] = -10000000000 - 1000 * i", "self.snake2.draw() self.snake1.dead = False self.snake2.dead = False # todo for the two apples", "self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead = True # TODO if snake2's head eats snake1", "And Apple Game\") pygame.mixer.init() self.play_background_music() self.surface = pygame.display.set_mode((1000, 800)) self.snake1 = Snake(self.surface) #", "self.snake1.dead == False: self.snake1.walk() print(\"DEKU ALIVE\") if self.snake2.dead == False: self.snake2.walk() print(\"BAKUGOU ALIVE\")", "TODO if snake1's head eats snake2 it becomes bigger if self.snake1.dead == False", "# todo spend times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def play_sound(self, sound_name): if", "spend times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1, 0) def play_sound(self, sound_name): if sound_name ==", "self.play_sound(\"crash\") self.snake1.dead = True if self.snake2.dead == False: for i in range(3, self.snake2.length):", "except Exception as e: print(e) print(\"raise exception\") self.show_game_over() pause = True self.reset() #", "collide\") return True print(\"not collide\") return False def render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg,", "self.apple2.draw() # todo make these sankes over the window # snake colliding with", "(255, 255, 255) class Apple: def __init__(self, parent_screen): self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen =", "pygame.display.flip() def move(self): self.x = random.randint(1, 24) * SIZE self.y = random.randint(1, 19)", "== False: for i in range(3, self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\")", "False: for i in range(3, 
self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead", "y): if x > 1000 or x < 0: print(\"is collide\") return True", "if not pause: # for the first snake if self.snake1.dead == False: if", "False: for i in range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\") self.snake2.dead =", "== False: if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead = True for i in range(self.snake2.length):", "font = pygame.font.SysFont(\"arial\", 30) line1 = font.render(f\"Game is over! Your score is {self.snake1.length", "range(self.snake1.length): self.snake1.x[i] = -10000000000 - 1000 * i self.snake1.y[i] = -10000000000 - 1000", "[40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert()", "itself if self.snake1.dead == False: for i in range(3, self.snake1.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0],", "__init__(self, parent_screen): self.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.parent_screen = parent_screen self.x = random.randint(1, 24) *", "again press Enter. 
To exit press Escape!\", True, WHITE_COLOR) self.surface.blit(line2, (200, 350)) pygame.mixer.music.pause()", "update head if self.direction == 'left': self.x[0] -= SIZE elif self.direction == 'right':", "self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple2 = Apple(self.surface) self.apple2.image = pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1, y1,", "self.snake1.dead == False: if self.collide_boundaries(self.snake1.x[0], self.snake1.y[0]): self.play_sound(\"crash\") self.snake1.dead = True for i in", "self.snake2.x = [40] self.snake2.y = [40] self.snake1.image = pygame.image.load(\"resources/deku.jpg\").convert() self.snake2.image = pygame.image.load(\"resources/bakugou.jpg\").convert() self.apple1", "play(self): self.render_background() # for the two snake # if self.snake2.dead == True: #", "= pygame.image.load(\"resources/gold_apple.jpg\").convert() self.apple2.draw() def play_background_music(self): # todo spend times read docs pygame.mixer.music.load('resources/My_Hero_Academy_OP.mp3') pygame.mixer.music.play(-1,", "self.apple2.move() self.apple2.draw() # todo make these sankes over the window # snake colliding", "# todo make these sankes over the window # snake colliding with itself", "if event.key == K_ESCAPE: running = False if event.key == K_RETURN: pygame.mixer.music.unpause() pause", "two apples self.apple1 = Apple(self.surface) self.apple1.image = pygame.image.load(\"resources/The_apple_everyone_want.jpg\").convert() self.apple1.draw() self.apple2 = Apple(self.surface) self.apple2.image", "self.x[i] = self.x[i - 1] self.y[i] = self.y[i - 1] # update head", "self.draw() def draw(self): for i in range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i])) print(self.x[i], self.y[i]) pygame.display.update()", "i self.snake1.y[i] = -10000000000 - 1000 * i if self.snake2.dead == False: if", "event.key == K_d: self.snake2.move_right() if event.key == 
K_w: self.snake2.move_up() if event.key == K_s:", "elif self.direction == 'down': self.y[0] += SIZE self.draw() def draw(self): for i in", "= False self.snake1.x = [920] self.snake1.y = [40] self.snake2.x = [40] self.snake2.y =", "font.render(\"To play again press Enter. To exit press Escape!\", True, WHITE_COLOR) self.surface.blit(line2, (200,", "self.direction = 'down' def walk(self): # update body for i in range(self.length -", "= pygame.font.SysFont('arial', 30) score = font.render(f\"Score: {self.snake1.length + self.snake2.length}\", True, (200, 200, 200))", "self.x = random.randint(1, 24) * SIZE self.y = random.randint(1, 19) * SIZE class", "self.y[0] += SIZE self.draw() def draw(self): for i in range(self.length): self.parent_screen.blit(self.image, (self.x[i], self.y[i]))", "= False if not pause: # for the first snake if self.snake1.dead ==", "if event.key == K_RIGHT: self.snake1.move_right() if event.key == K_UP: self.snake1.move_up() if event.key ==", "def move(self): self.x = random.randint(1, 24) * SIZE self.y = random.randint(1, 19) *", "= pygame.image.load(\"resources/block.jpg\").convert() self.direction = 'down' self.length = 1 self.x = [SIZE] # SIZE", "__init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake And Apple Game\") pygame.mixer.init() self.play_background_music() self.surface = pygame.display.set_mode((1000, 800))", "self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True # TODO if both's head", "= pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\crash.mp3\") pygame.mixer.Sound.play(sound) elif sound_name == \"ding\": sound = pygame.mixer.Sound(r\"D:\\Python\\Master_Python\\Python_Snake_games\\resources\\ding.mp3\") pygame.mixer.Sound.play(sound) # pygame.mixer.Sound.play(pygame.mixer.Sound(\"resouces/ding.mp3\"))", "self.snake1.move_down() # for the second snake if self.snake2.dead == False: if 
event.key ==", "Snake(self.surface) self.snake1.dead = False self.snake2.dead = False self.snake1.x = [920] self.snake1.y = [40]", "pygame.display.flip() # for the screen update # snake colliding with apple if self.is_collision(self.snake1.x[0],", "if self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead = True for i in range(self.snake2.length): self.snake2.x[i] =", "== 'left': self.x[0] -= SIZE elif self.direction == 'right': self.x[0] += SIZE elif", "< 0: print(\"is collide\") return True print(\"not collide\") return False def render_background(self): bg", "+= 1 self.x.append(-1) self.y.append(-1) class Game: def __init__(self): pygame.init() pygame.display.set_caption(\"Codebasics Snake And Apple", "self.play_sound(\"ding\") self.snake2.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.apple2.x, self.apple2.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple2.move() self.apple2.draw()", "self.snake2 = Snake(self.surface) # for the blue one. 
# for initialize the two", "self.direction == 'left': self.x[0] -= SIZE elif self.direction == 'right': self.x[0] += SIZE", "and self.snake2.dead == False: for i in range(self.snake1.length): if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]):", "self.direction = 'left' def move_right(self): self.direction = 'right' def move_up(self): self.direction = 'up'", "def render_background(self): bg = pygame.image.load(\"resources/background_hero.jpg\") self.surface.blit(bg, (0, 0)) def play(self): self.render_background() # for", "# TODO: Make the apple become two self.apple1.draw() self.apple2.draw() self.display_score() pygame.display.flip() # for", "self.snake2.dead = False # todo for the two apples self.apple1 = Apple(self.surface) self.apple1.image", "self.snake2.y[0], self.snake1.x[i], self.snake1.y[i]): self.play_sound(\"crash\") self.snake1.dead = True # TODO if both's head eats", "e: print(e) print(\"raise exception\") self.show_game_over() pause = True self.reset() # self.snake.walk() time.sleep(.1) if", "pygame.image.load(\"resources/gold_apple.jpg\").convert() def is_collision(self, x1, y1, x2, y2): if x1 >= x2 and x1", "if self.snake2.dead == False: if event.key == K_a: self.snake2.move_left() if event.key == K_d:", "sankes over the window # snake colliding with itself if self.snake1.dead == False:", "collide\") return True if y > 800 or y < 0: print(\"is collide\")", "self.snake1.dead = False self.snake2.dead = False # todo for the two apples self.apple1", "self.collide_boundaries(self.snake2.x[0], self.snake2.y[0]): self.play_sound(\"crash\") self.snake2.dead = True for i in range(self.snake2.length): self.snake2.x[i] = -10000000000", "False while running: for event in pygame.event.get(): if event.type == KEYDOWN: if event.key", "pause = False if not pause: # for the first snake if self.snake1.dead", "'left': self.x[0] -= SIZE elif self.direction == 'right': self.x[0] += SIZE elif self.direction", 
"pygame.display.flip() def run(self): running = True pause = False while running: for event", "self.snake2.dead == False: for i in range(self.snake2.length): if self.is_collision(self.snake1.x[0], self.snake1.y[0], self.snake2.x[i], self.snake2.y[i]): self.play_sound(\"crash\")", "x2 and x1 < x2 + SIZE: if y1 >= y2 and y1", "self.snake1.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\") self.snake1.increase_length() self.apple1.move() self.apple1.draw() if self.is_collision(self.snake2.x[0], self.snake2.y[0], self.apple1.x, self.apple1.y): self.play_sound(\"ding\")", "K_ESCAPE: running = False if event.key == K_RETURN: pygame.mixer.music.unpause() pause = False if" ]
[ "i, col in enumerate(columns): cdx = (np.max(df[col]) - np.min(df[col]))/(n[i] - 1) cube_df[col] =", "np.sort(n_in)[-cut_idx] mask = (n_in >= thresh) centers['weights'] = n_in/np.sum(n_in[mask]) centers = centers[mask] centers", "re import latbin def lineify(df, n, column): return cubeify(df, [n], [column]) def squareify(df,", "the column to sum up for each bin returns cube: ndarray the \"cubeified\"", "ycol]) def cubeify(df, n, columns, target=\"weights\"): \"\"\"bins up a dataframe into a densely", "= min(len(centers), npts_out) thresh = np.sort(n_in)[-cut_idx] mask = (n_in >= thresh) centers['weights'] =", "column to sum up for each bin returns cube: ndarray the \"cubeified\" data", "return cubeify(df, [nx, ny], [xcol, ycol]) def cubeify(df, n, columns, target=\"weights\"): \"\"\"bins up", "return centers class PointFilter(object): \"\"\"PointFilter handles efficiently calculating distances to a set of", "xcol, ycol): return cubeify(df, [nx, ny], [xcol, ycol]) def cubeify(df, n, columns, target=\"weights\"):", "target: column name the column to sum up for each bin returns cube:", "def compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress a large number of points into a small", "col) is None: colnames.append(col) colnames = np.array(colnames) centers = centers[colnames].copy() return centers class", "sigma_vec, copy=True, ): \"\"\" point_cloud: pandas DataFrame the points in this filter filtered_columns:", "dataframe will not be made. 
\"\"\" if copy: point_cloud = point_cloud.copy() self.point_cloud =", "centers.reset_index() colnames = [] for col in centers.columns: if re.match('q_', col) is None:", "self.filtered_columns = filtered_columns self.sigma_vec = sigma_vec def get_weights(self, point_cloud): pdata = point_cloud[self.filtered_columns] filter_pts", "scale to use along each dimension copy: bool if False a copy of", "for col in centers.columns: if re.match('q_', col) is None: colnames.append(col) colnames = np.array(colnames)", "list the column names to filter on sigma_vec: ndarray the distance scale to", "pandas DataFrame the points in this filter filtered_columns: list the column names to", "sum up for each bin returns cube: ndarray the \"cubeified\" data \"\"\" cube_df", "for i, col in enumerate(columns): cdx = (np.max(df[col]) - np.min(df[col]))/(n[i] - 1) cube_df[col]", "ndarray the \"cubeified\" data \"\"\" cube_df = pd.DataFrame() cube_df[target] = df[target] for i,", "distances to a set of points in many dimensions. \"\"\" def __init__( self,", "= df[target] for i, col in enumerate(columns): cdx = (np.max(df[col]) - np.min(df[col]))/(n[i] -", "a set of points in many dimensions. \"\"\" def __init__( self, point_cloud, filtered_columns,", "= (n_in >= thresh) centers['weights'] = n_in/np.sum(n_in[mask]) centers = centers[mask] centers = centers.reset_index()", "1.0/len(point_cloud), len(point_cloud)) self.filtered_columns = filtered_columns self.sigma_vec = sigma_vec def get_weights(self, point_cloud): pdata =", "n, columns, target=\"weights\"): \"\"\"bins up a dataframe into a densely sampled cube n:", "target=\"weights\"): \"\"\"bins up a dataframe into a densely sampled cube n: list the", "a copy of the input dataframe will not be made. 
\"\"\" if copy:", "on sigma_vec: ndarray the distance scale to use along each dimension copy: bool", "= np.zeros(n) for ind in gsum.index: out_cube[ind] = gsum.ix[ind] return out_cube def compress_cloud(df,", "__init__( self, point_cloud, filtered_columns, sigma_vec, copy=True, ): \"\"\" point_cloud: pandas DataFrame the points", "for each bin returns cube: ndarray the \"cubeified\" data \"\"\" cube_df = pd.DataFrame()", "centers['weights'] = n_in/np.sum(n_in[mask]) centers = centers[mask] centers = centers.reset_index() colnames = [] for", "to bin on target: column name the column to sum up for each", "centers[mask] centers = centers.reset_index() colnames = [] for col in centers.columns: if re.match('q_',", "not \"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"] = np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns = filtered_columns self.sigma_vec", "self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec, filter_pts/self.sigma_vec, ) weights = sim_matrix * self.point_cloud[\"weights\"].values return", "[] for col in centers.columns: if re.match('q_', col) is None: colnames.append(col) colnames =", "efficiently calculating distances to a set of points in many dimensions. \"\"\" def", "columns: list the column names to bin on target: column name the column", "n: list the dimensions of the cube columns: list the column names to", "the input dataframe will not be made. 
\"\"\" if copy: point_cloud = point_cloud.copy()", "ny], [xcol, ycol]) def cubeify(df, n, columns, target=\"weights\"): \"\"\"bins up a dataframe into", "if re.match('q_', col) is None: colnames.append(col) colnames = np.array(colnames) centers = centers[colnames].copy() return", "= cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n) for ind in gsum.index: out_cube[ind] = gsum.ix[ind] return", "columns, target=\"weights\"): \"\"\"bins up a dataframe into a densely sampled cube n: list", "(np.max(df[col]) - np.min(df[col]))/(n[i] - 1) cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col]) gsum =", "names to bin on target: column name the column to sum up for", "get_weights(self, point_cloud): pdata = point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec, filter_pts/self.sigma_vec,", "class PointFilter(object): \"\"\"PointFilter handles efficiently calculating distances to a set of points in", "def lineify(df, n, column): return cubeify(df, [n], [column]) def squareify(df, nx, ny, xcol,", "self.point_cloud[\"weights\"] = np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns = filtered_columns self.sigma_vec = sigma_vec def get_weights(self,", "representative sample via multidimensional histogramming and averaging. \"\"\" Aparam = latbin.ALattice(len(df.columns), scale=bin_size) pts", "col in centers.columns: if re.match('q_', col) is None: colnames.append(col) colnames = np.array(colnames) centers", "input dataframe will not be made. 
\"\"\" if copy: point_cloud = point_cloud.copy() self.point_cloud", "the \"cubeified\" data \"\"\" cube_df = pd.DataFrame() cube_df[target] = df[target] for i, col", "large number of points into a small representative sample via multidimensional histogramming and", "\"\"\" Aparam = latbin.ALattice(len(df.columns), scale=bin_size) pts = Aparam.bin(df) centers = pts.mean() n_in =", "up a dataframe into a densely sampled cube n: list the dimensions of", "= sigma_vec def get_weights(self, point_cloud): pdata = point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns] sim_matrix =", "sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec, filter_pts/self.sigma_vec, ) weights = sim_matrix * self.point_cloud[\"weights\"].values return weights", "many dimensions. \"\"\" def __init__( self, point_cloud, filtered_columns, sigma_vec, copy=True, ): \"\"\" point_cloud:", "filter filtered_columns: list the column names to filter on sigma_vec: ndarray the distance", "np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n) for ind in", "out_cube[ind] = gsum.ix[ind] return out_cube def compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress a large number", "[column]) def squareify(df, nx, ny, xcol, ycol): return cubeify(df, [nx, ny], [xcol, ycol])", "points into a small representative sample via multidimensional histogramming and averaging. 
\"\"\" Aparam", "cube_df = pd.DataFrame() cube_df[target] = df[target] for i, col in enumerate(columns): cdx =", "[xcol, ycol]) def cubeify(df, n, columns, target=\"weights\"): \"\"\"bins up a dataframe into a", "[n], [column]) def squareify(df, nx, ny, xcol, ycol): return cubeify(df, [nx, ny], [xcol,", "in gsum.index: out_cube[ind] = gsum.ix[ind] return out_cube def compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress a", "as np import pandas as pd import re import latbin def lineify(df, n,", "ycol): return cubeify(df, [nx, ny], [xcol, ycol]) def cubeify(df, n, columns, target=\"weights\"): \"\"\"bins", "lineify(df, n, column): return cubeify(df, [n], [column]) def squareify(df, nx, ny, xcol, ycol):", "filter on sigma_vec: ndarray the distance scale to use along each dimension copy:", "to use along each dimension copy: bool if False a copy of the", "latbin.ALattice(len(df.columns), scale=bin_size) pts = Aparam.bin(df) centers = pts.mean() n_in = pts.size() cut_idx =", "latbin def lineify(df, n, column): return cubeify(df, [n], [column]) def squareify(df, nx, ny,", "each bin returns cube: ndarray the \"cubeified\" data \"\"\" cube_df = pd.DataFrame() cube_df[target]", "ind in gsum.index: out_cube[ind] = gsum.ix[ind] return out_cube def compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress", "= pts.mean() n_in = pts.size() cut_idx = min(len(centers), npts_out) thresh = np.sort(n_in)[-cut_idx] mask", "= np.array(colnames) centers = centers[colnames].copy() return centers class PointFilter(object): \"\"\"PointFilter handles efficiently calculating", "filtered_columns self.sigma_vec = sigma_vec def get_weights(self, point_cloud): pdata = point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns]", "centers = centers[colnames].copy() return centers class PointFilter(object): \"\"\"PointFilter handles efficiently calculating distances to", "import pandas as pd import re import latbin def lineify(df, n, column): return", "as 
pd import re import latbin def lineify(df, n, column): return cubeify(df, [n],", "number of points into a small representative sample via multidimensional histogramming and averaging.", "ny, xcol, ycol): return cubeify(df, [nx, ny], [xcol, ycol]) def cubeify(df, n, columns,", "= pts.size() cut_idx = min(len(centers), npts_out) thresh = np.sort(n_in)[-cut_idx] mask = (n_in >=", "in centers.columns: if re.match('q_', col) is None: colnames.append(col) colnames = np.array(colnames) centers =", "a densely sampled cube n: list the dimensions of the cube columns: list", "npts_out) thresh = np.sort(n_in)[-cut_idx] mask = (n_in >= thresh) centers['weights'] = n_in/np.sum(n_in[mask]) centers", "list the dimensions of the cube columns: list the column names to bin", "pts.size() cut_idx = min(len(centers), npts_out) thresh = np.sort(n_in)[-cut_idx] mask = (n_in >= thresh)", "each dimension copy: bool if False a copy of the input dataframe will", "len(point_cloud)) self.filtered_columns = filtered_columns self.sigma_vec = sigma_vec def get_weights(self, point_cloud): pdata = point_cloud[self.filtered_columns]", "Aparam.bin(df) centers = pts.mean() n_in = pts.size() cut_idx = min(len(centers), npts_out) thresh =", "copy: point_cloud = point_cloud.copy() self.point_cloud = point_cloud if not \"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"]", "= centers[mask] centers = centers.reset_index() colnames = [] for col in centers.columns: if", "return cubeify(df, [n], [column]) def squareify(df, nx, ny, xcol, ycol): return cubeify(df, [nx,", "point_cloud): pdata = point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec, filter_pts/self.sigma_vec, )", "n_in/np.sum(n_in[mask]) centers = centers[mask] centers = centers.reset_index() colnames = [] for col in", "out_cube def compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress a large number of points into 
a", "copy: bool if False a copy of the input dataframe will not be", "(n_in >= thresh) centers['weights'] = n_in/np.sum(n_in[mask]) centers = centers[mask] centers = centers.reset_index() colnames", "made. \"\"\" if copy: point_cloud = point_cloud.copy() self.point_cloud = point_cloud if not \"weights\"", ">= thresh) centers['weights'] = n_in/np.sum(n_in[mask]) centers = centers[mask] centers = centers.reset_index() colnames =", "cube n: list the dimensions of the cube columns: list the column names", "bool if False a copy of the input dataframe will not be made.", "): \"\"\" point_cloud: pandas DataFrame the points in this filter filtered_columns: list the", "the dimensions of the cube columns: list the column names to bin on", "of the cube columns: list the column names to bin on target: column", "cdx = (np.max(df[col]) - np.min(df[col]))/(n[i] - 1) cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col])", "filtered_columns: list the column names to filter on sigma_vec: ndarray the distance scale", "centers = centers.reset_index() colnames = [] for col in centers.columns: if re.match('q_', col)", "- np.min(df[col]))/(n[i] - 1) cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum()", "enumerate(columns): cdx = (np.max(df[col]) - np.min(df[col]))/(n[i] - 1) cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col] -=", "= n_in/np.sum(n_in[mask]) centers = centers[mask] centers = centers.reset_index() colnames = [] for col", "cube: ndarray the \"cubeified\" data \"\"\" cube_df = pd.DataFrame() cube_df[target] = df[target] for", "of the input dataframe will not be made. 
\"\"\" if copy: point_cloud =", "if not \"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"] = np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns = filtered_columns", "sigma_vec def get_weights(self, point_cloud): pdata = point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix(", "the points in this filter filtered_columns: list the column names to filter on", "mask = (n_in >= thresh) centers['weights'] = n_in/np.sum(n_in[mask]) centers = centers[mask] centers =", "cubeify(df, [nx, ny], [xcol, ycol]) def cubeify(df, n, columns, target=\"weights\"): \"\"\"bins up a", "= latbin.ALattice(len(df.columns), scale=bin_size) pts = Aparam.bin(df) centers = pts.mean() n_in = pts.size() cut_idx", "self, point_cloud, filtered_columns, sigma_vec, copy=True, ): \"\"\" point_cloud: pandas DataFrame the points in", "bin_size=1., npts_out=250): \"\"\"compress a large number of points into a small representative sample", "pd import re import latbin def lineify(df, n, column): return cubeify(df, [n], [column])", "is None: colnames.append(col) colnames = np.array(colnames) centers = centers[colnames].copy() return centers class PointFilter(object):", "distance scale to use along each dimension copy: bool if False a copy", "centers class PointFilter(object): \"\"\"PointFilter handles efficiently calculating distances to a set of points", "\"\"\"PointFilter handles efficiently calculating distances to a set of points in many dimensions.", "dimensions of the cube columns: list the column names to bin on target:", "squareify(df, nx, ny, xcol, ycol): return cubeify(df, [nx, ny], [xcol, ycol]) def cubeify(df,", "= point_cloud.copy() self.point_cloud = point_cloud if not \"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"] = np.repeat(", "self.point_cloud = point_cloud if not \"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"] = 
np.repeat( 1.0/len(point_cloud), len(point_cloud))", "\"\"\" point_cloud: pandas DataFrame the points in this filter filtered_columns: list the column", "of points in many dimensions. \"\"\" def __init__( self, point_cloud, filtered_columns, sigma_vec, copy=True,", "into a densely sampled cube n: list the dimensions of the cube columns:", "colnames = np.array(colnames) centers = centers[colnames].copy() return centers class PointFilter(object): \"\"\"PointFilter handles efficiently", "import numpy as np import pandas as pd import re import latbin def", "names to filter on sigma_vec: ndarray the distance scale to use along each", "cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n) for", "import re import latbin def lineify(df, n, column): return cubeify(df, [n], [column]) def", "= (np.max(df[col]) - np.min(df[col]))/(n[i] - 1) cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col]) gsum", "thresh = np.sort(n_in)[-cut_idx] mask = (n_in >= thresh) centers['weights'] = n_in/np.sum(n_in[mask]) centers =", "colnames.append(col) colnames = np.array(colnames) centers = centers[colnames].copy() return centers class PointFilter(object): \"\"\"PointFilter handles", "not be made. 
\"\"\" if copy: point_cloud = point_cloud.copy() self.point_cloud = point_cloud if", "centers = pts.mean() n_in = pts.size() cut_idx = min(len(centers), npts_out) thresh = np.sort(n_in)[-cut_idx]", "centers.columns: if re.match('q_', col) is None: colnames.append(col) colnames = np.array(colnames) centers = centers[colnames].copy()", "np import pandas as pd import re import latbin def lineify(df, n, column):", "nx, ny, xcol, ycol): return cubeify(df, [nx, ny], [xcol, ycol]) def cubeify(df, n,", "col in enumerate(columns): cdx = (np.max(df[col]) - np.min(df[col]))/(n[i] - 1) cube_df[col] = np.around(df[col]/cdx).astype(int)", "name the column to sum up for each bin returns cube: ndarray the", "centers[colnames].copy() return centers class PointFilter(object): \"\"\"PointFilter handles efficiently calculating distances to a set", "pd.DataFrame() cube_df[target] = df[target] for i, col in enumerate(columns): cdx = (np.max(df[col]) -", "cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n) for ind in gsum.index: out_cube[ind] = gsum.ix[ind] return out_cube", "self.point_cloud.columns: self.point_cloud[\"weights\"] = np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns = filtered_columns self.sigma_vec = sigma_vec def", "a small representative sample via multidimensional histogramming and averaging. 
\"\"\" Aparam = latbin.ALattice(len(df.columns),", "\"\"\"compress a large number of points into a small representative sample via multidimensional", "centers = centers[mask] centers = centers.reset_index() colnames = [] for col in centers.columns:", "point_cloud, filtered_columns, sigma_vec, copy=True, ): \"\"\" point_cloud: pandas DataFrame the points in this", "pts.mean() n_in = pts.size() cut_idx = min(len(centers), npts_out) thresh = np.sort(n_in)[-cut_idx] mask =", "the cube columns: list the column names to bin on target: column name", "= point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec, filter_pts/self.sigma_vec, ) weights =", "re.match('q_', col) is None: colnames.append(col) colnames = np.array(colnames) centers = centers[colnames].copy() return centers", "def get_weights(self, point_cloud): pdata = point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec,", "\"\"\" def __init__( self, point_cloud, filtered_columns, sigma_vec, copy=True, ): \"\"\" point_cloud: pandas DataFrame", "column names to filter on sigma_vec: ndarray the distance scale to use along", "calculating distances to a set of points in many dimensions. \"\"\" def __init__(", "PointFilter(object): \"\"\"PointFilter handles efficiently calculating distances to a set of points in many", "sample via multidimensional histogramming and averaging. \"\"\" Aparam = latbin.ALattice(len(df.columns), scale=bin_size) pts =", "handles efficiently calculating distances to a set of points in many dimensions. \"\"\"", "dimension copy: bool if False a copy of the input dataframe will not", "the column names to filter on sigma_vec: ndarray the distance scale to use", "if False a copy of the input dataframe will not be made. 
\"\"\"", "compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress a large number of points into a small representative", "= np.sort(n_in)[-cut_idx] mask = (n_in >= thresh) centers['weights'] = n_in/np.sum(n_in[mask]) centers = centers[mask]", "column names to bin on target: column name the column to sum up", "[nx, ny], [xcol, ycol]) def cubeify(df, n, columns, target=\"weights\"): \"\"\"bins up a dataframe", "cubeify(df, [n], [column]) def squareify(df, nx, ny, xcol, ycol): return cubeify(df, [nx, ny],", "points in this filter filtered_columns: list the column names to filter on sigma_vec:", "on target: column name the column to sum up for each bin returns", "ndarray the distance scale to use along each dimension copy: bool if False", "DataFrame the points in this filter filtered_columns: list the column names to filter", "= np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n) for ind", "np.min(df[col]))/(n[i] - 1) cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum() out_cube", "to sum up for each bin returns cube: ndarray the \"cubeified\" data \"\"\"", "histogramming and averaging. \"\"\" Aparam = latbin.ALattice(len(df.columns), scale=bin_size) pts = Aparam.bin(df) centers =", "cut_idx = min(len(centers), npts_out) thresh = np.sort(n_in)[-cut_idx] mask = (n_in >= thresh) centers['weights']", "= pd.DataFrame() cube_df[target] = df[target] for i, col in enumerate(columns): cdx = (np.max(df[col])", "use along each dimension copy: bool if False a copy of the input", "= [] for col in centers.columns: if re.match('q_', col) is None: colnames.append(col) colnames", "points in many dimensions. 
\"\"\" def __init__( self, point_cloud, filtered_columns, sigma_vec, copy=True, ):", "import latbin def lineify(df, n, column): return cubeify(df, [n], [column]) def squareify(df, nx,", "np.array(colnames) centers = centers[colnames].copy() return centers class PointFilter(object): \"\"\"PointFilter handles efficiently calculating distances", "returns cube: ndarray the \"cubeified\" data \"\"\" cube_df = pd.DataFrame() cube_df[target] = df[target]", "list the column names to bin on target: column name the column to", "in this filter filtered_columns: list the column names to filter on sigma_vec: ndarray", "pdata = point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec, filter_pts/self.sigma_vec, ) weights", "False a copy of the input dataframe will not be made. \"\"\" if", "n, column): return cubeify(df, [n], [column]) def squareify(df, nx, ny, xcol, ycol): return", "min(len(centers), npts_out) thresh = np.sort(n_in)[-cut_idx] mask = (n_in >= thresh) centers['weights'] = n_in/np.sum(n_in[mask])", "bin on target: column name the column to sum up for each bin", "\"cubeified\" data \"\"\" cube_df = pd.DataFrame() cube_df[target] = df[target] for i, col in", "out_cube = np.zeros(n) for ind in gsum.index: out_cube[ind] = gsum.ix[ind] return out_cube def", "cube_df[target] = df[target] for i, col in enumerate(columns): cdx = (np.max(df[col]) - np.min(df[col]))/(n[i]", "def squareify(df, nx, ny, xcol, ycol): return cubeify(df, [nx, ny], [xcol, ycol]) def", "= centers.reset_index() colnames = [] for col in centers.columns: if re.match('q_', col) is", "point_cloud if not \"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"] = np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns =", "df[target] for i, col in enumerate(columns): cdx = (np.max(df[col]) - np.min(df[col]))/(n[i] - 1)", "<gh_stars>0 import numpy as np import pandas as pd 
import re import latbin", "= self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec, filter_pts/self.sigma_vec, ) weights = sim_matrix * self.point_cloud[\"weights\"].values", "bin returns cube: ndarray the \"cubeified\" data \"\"\" cube_df = pd.DataFrame() cube_df[target] =", "copy=True, ): \"\"\" point_cloud: pandas DataFrame the points in this filter filtered_columns: list", "1) cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n)", "column name the column to sum up for each bin returns cube: ndarray", "= np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns = filtered_columns self.sigma_vec = sigma_vec def get_weights(self, point_cloud):", "np.zeros(n) for ind in gsum.index: out_cube[ind] = gsum.ix[ind] return out_cube def compress_cloud(df, bin_size=1.,", "sampled cube n: list the dimensions of the cube columns: list the column", "= Aparam.bin(df) centers = pts.mean() n_in = pts.size() cut_idx = min(len(centers), npts_out) thresh", "point_cloud.copy() self.point_cloud = point_cloud if not \"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"] = np.repeat( 1.0/len(point_cloud),", "a dataframe into a densely sampled cube n: list the dimensions of the", "data \"\"\" cube_df = pd.DataFrame() cube_df[target] = df[target] for i, col in enumerate(columns):", "dataframe into a densely sampled cube n: list the dimensions of the cube", "multidimensional histogramming and averaging. 
\"\"\" Aparam = latbin.ALattice(len(df.columns), scale=bin_size) pts = Aparam.bin(df) centers", "np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n) for ind in gsum.index: out_cube[ind] =", "a large number of points into a small representative sample via multidimensional histogramming", "the distance scale to use along each dimension copy: bool if False a", "in self.point_cloud.columns: self.point_cloud[\"weights\"] = np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns = filtered_columns self.sigma_vec = sigma_vec", "\"\"\" cube_df = pd.DataFrame() cube_df[target] = df[target] for i, col in enumerate(columns): cdx", "in enumerate(columns): cdx = (np.max(df[col]) - np.min(df[col]))/(n[i] - 1) cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col]", "for ind in gsum.index: out_cube[ind] = gsum.ix[ind] return out_cube def compress_cloud(df, bin_size=1., npts_out=250):", "return out_cube def compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress a large number of points into", "numpy as np import pandas as pd import re import latbin def lineify(df,", "thresh) centers['weights'] = n_in/np.sum(n_in[mask]) centers = centers[mask] centers = centers.reset_index() colnames = []", "set of points in many dimensions. \"\"\" def __init__( self, point_cloud, filtered_columns, sigma_vec,", "in many dimensions. 
\"\"\" def __init__( self, point_cloud, filtered_columns, sigma_vec, copy=True, ): \"\"\"", "if copy: point_cloud = point_cloud.copy() self.point_cloud = point_cloud if not \"weights\" in self.point_cloud.columns:", "self.sigma_vec = sigma_vec def get_weights(self, point_cloud): pdata = point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns] sim_matrix", "gsum.ix[ind] return out_cube def compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress a large number of points", "= gsum.ix[ind] return out_cube def compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress a large number of", "this filter filtered_columns: list the column names to filter on sigma_vec: ndarray the", "pts = Aparam.bin(df) centers = pts.mean() n_in = pts.size() cut_idx = min(len(centers), npts_out)", "densely sampled cube n: list the dimensions of the cube columns: list the", "to a set of points in many dimensions. \"\"\" def __init__( self, point_cloud,", "will not be made. \"\"\" if copy: point_cloud = point_cloud.copy() self.point_cloud = point_cloud", "into a small representative sample via multidimensional histogramming and averaging. \"\"\" Aparam =", "cube columns: list the column names to bin on target: column name the", "gsum.index: out_cube[ind] = gsum.ix[ind] return out_cube def compress_cloud(df, bin_size=1., npts_out=250): \"\"\"compress a large", "scale=bin_size) pts = Aparam.bin(df) centers = pts.mean() n_in = pts.size() cut_idx = min(len(centers),", "sigma_vec: ndarray the distance scale to use along each dimension copy: bool if", "small representative sample via multidimensional histogramming and averaging. 
\"\"\" Aparam = latbin.ALattice(len(df.columns), scale=bin_size)", "point_cloud = point_cloud.copy() self.point_cloud = point_cloud if not \"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"] =", "cube_df[col] -= np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n) for ind in gsum.index:", "npts_out=250): \"\"\"compress a large number of points into a small representative sample via", "None: colnames.append(col) colnames = np.array(colnames) centers = centers[colnames].copy() return centers class PointFilter(object): \"\"\"PointFilter", "def cubeify(df, n, columns, target=\"weights\"): \"\"\"bins up a dataframe into a densely sampled", "\"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"] = np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns = filtered_columns self.sigma_vec =", "def __init__( self, point_cloud, filtered_columns, sigma_vec, copy=True, ): \"\"\" point_cloud: pandas DataFrame the", "- 1) cube_df[col] = np.around(df[col]/cdx).astype(int) cube_df[col] -= np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum() out_cube =", "n_in = pts.size() cut_idx = min(len(centers), npts_out) thresh = np.sort(n_in)[-cut_idx] mask = (n_in", "pandas as pd import re import latbin def lineify(df, n, column): return cubeify(df,", "colnames = [] for col in centers.columns: if re.match('q_', col) is None: colnames.append(col)", "= filtered_columns self.sigma_vec = sigma_vec def get_weights(self, point_cloud): pdata = point_cloud[self.filtered_columns] filter_pts =", "averaging. 
\"\"\" Aparam = latbin.ALattice(len(df.columns), scale=bin_size) pts = Aparam.bin(df) centers = pts.mean() n_in", "np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns = filtered_columns self.sigma_vec = sigma_vec def get_weights(self, point_cloud): pdata", "point_cloud: pandas DataFrame the points in this filter filtered_columns: list the column names", "= point_cloud if not \"weights\" in self.point_cloud.columns: self.point_cloud[\"weights\"] = np.repeat( 1.0/len(point_cloud), len(point_cloud)) self.filtered_columns", "point_cloud[self.filtered_columns] filter_pts = self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec, filter_pts/self.sigma_vec, ) weights = sim_matrix", "the column names to bin on target: column name the column to sum", "gsum = cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n) for ind in gsum.index: out_cube[ind] = gsum.ix[ind]", "of points into a small representative sample via multidimensional histogramming and averaging. \"\"\"", "be made. \"\"\" if copy: point_cloud = point_cloud.copy() self.point_cloud = point_cloud if not", "\"\"\"bins up a dataframe into a densely sampled cube n: list the dimensions", "and averaging. \"\"\" Aparam = latbin.ALattice(len(df.columns), scale=bin_size) pts = Aparam.bin(df) centers = pts.mean()", "up for each bin returns cube: ndarray the \"cubeified\" data \"\"\" cube_df =", "Aparam = latbin.ALattice(len(df.columns), scale=bin_size) pts = Aparam.bin(df) centers = pts.mean() n_in = pts.size()", "filtered_columns, sigma_vec, copy=True, ): \"\"\" point_cloud: pandas DataFrame the points in this filter", "via multidimensional histogramming and averaging. 
\"\"\" Aparam = latbin.ALattice(len(df.columns), scale=bin_size) pts = Aparam.bin(df)", "to filter on sigma_vec: ndarray the distance scale to use along each dimension", "along each dimension copy: bool if False a copy of the input dataframe", "-= np.min(cube_df[col]) gsum = cube_df.groupby(columns)[target].sum() out_cube = np.zeros(n) for ind in gsum.index: out_cube[ind]", "dimensions. \"\"\" def __init__( self, point_cloud, filtered_columns, sigma_vec, copy=True, ): \"\"\" point_cloud: pandas", "copy of the input dataframe will not be made. \"\"\" if copy: point_cloud", "column): return cubeify(df, [n], [column]) def squareify(df, nx, ny, xcol, ycol): return cubeify(df,", "filter_pts = self.point_cloud[self.filtered_columns] sim_matrix = latbin.matching.sparse_distance_matrix( pdata/self.sigma_vec, filter_pts/self.sigma_vec, ) weights = sim_matrix *", "\"\"\" if copy: point_cloud = point_cloud.copy() self.point_cloud = point_cloud if not \"weights\" in", "cubeify(df, n, columns, target=\"weights\"): \"\"\"bins up a dataframe into a densely sampled cube", "= centers[colnames].copy() return centers class PointFilter(object): \"\"\"PointFilter handles efficiently calculating distances to a" ]
[ "models.constants import RAY from experiments.system_model_v3.configure import configure_experiment from experiments.system_model_v3.run import run_experiment from experiments.utils", "- 1 MONTE_CARLO_RUNS = 1 sweeps = { 'controller_enabled': [True,False], } # Configure", "merge_parameter_sweep from radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS = 8758 #len(eth_price_df) - 1 MONTE_CARLO_RUNS =", "params_update, experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update) # Override parameters params_override = {", "now = datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder = __file__.split('.py')[0] results_id = now.isoformat() if", "Configure sweep and update parameters params_update, experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update) #", "#len(eth_price_df) - 1 MONTE_CARLO_RUNS = 1 sweeps = { 'controller_enabled': [True,False], } #", "update parameters params_update, experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update) # Override parameters params_override", "timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update) # Override parameters params_override = { 'liquidity_demand_enabled': [False], } params.update(params_override)", "if __name__ == '__main__': run_experiment(results_id, experiment_folder, experiment_metrics, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS, params=params, initial_state=state_variables, save_file=True, save_logs=True)", "[False], } params.update(params_override) # Experiment details now = datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder", "params from models.system_model_v3.model.state_variables.init import state_variables from models.constants import RAY from 
experiments.system_model_v3.configure import configure_experiment", "from experiments.system_model_v3.run import run_experiment from experiments.utils import save_to_HDF5, batch, merge_parameter_sweep from radcad.core import", "configure_experiment from experiments.system_model_v3.run import run_experiment from experiments.utils import save_to_HDF5, batch, merge_parameter_sweep from radcad.core", "import RAY from experiments.system_model_v3.configure import configure_experiment from experiments.system_model_v3.run import run_experiment from experiments.utils import", "models.system_model_v3.model.params.init import params from models.system_model_v3.model.state_variables.init import state_variables from models.constants import RAY from experiments.system_model_v3.configure", "save_to_HDF5, batch, merge_parameter_sweep from radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS = 8758 #len(eth_price_df) - 1", "import datetime import os from models.system_model_v3.model.params.init import params from models.system_model_v3.model.state_variables.init import state_variables from", "# Configure sweep and update parameters params_update, experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update)", "results_id = now.isoformat() if __name__ == '__main__': run_experiment(results_id, experiment_folder, experiment_metrics, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS, params=params,", "batch, merge_parameter_sweep from radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS = 8758 #len(eth_price_df) - 1 MONTE_CARLO_RUNS", "from models.system_model_v3.model.state_variables.init import state_variables from models.constants import RAY from experiments.system_model_v3.configure import configure_experiment from", "details now = datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder = __file__.split('.py')[0] results_id = now.isoformat()", 
"= { 'liquidity_demand_enabled': [False], } params.update(params_override) # Experiment details now = datetime.datetime.now() dir_path", "configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update) # Override parameters params_override = { 'liquidity_demand_enabled': [False], }", "datetime import os from models.system_model_v3.model.params.init import params from models.system_model_v3.model.state_variables.init import state_variables from models.constants", "datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder = __file__.split('.py')[0] results_id = now.isoformat() if __name__ ==", "os.path.dirname(os.path.realpath(__file__)) experiment_folder = __file__.split('.py')[0] results_id = now.isoformat() if __name__ == '__main__': run_experiment(results_id, experiment_folder,", "'liquidity_demand_enabled': [False], } params.update(params_override) # Experiment details now = datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__))", "runs=MONTE_CARLO_RUNS) params.update(params_update) # Override parameters params_override = { 'liquidity_demand_enabled': [False], } params.update(params_override) #", "1 sweeps = { 'controller_enabled': [True,False], } # Configure sweep and update parameters", "# Experiment details now = datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder = __file__.split('.py')[0] results_id", "= { 'controller_enabled': [True,False], } # Configure sweep and update parameters params_update, experiment_metrics", "experiments.utils import save_to_HDF5, batch, merge_parameter_sweep from radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS = 8758 #len(eth_price_df)", "params_override = { 'liquidity_demand_enabled': [False], } params.update(params_override) # Experiment details now = datetime.datetime.now()", "from models.constants import RAY from experiments.system_model_v3.configure 
import configure_experiment from experiments.system_model_v3.run import run_experiment from", "{ 'controller_enabled': [True,False], } # Configure sweep and update parameters params_update, experiment_metrics =", "} # Configure sweep and update parameters params_update, experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS)", "experiments.system_model_v3.run import run_experiment from experiments.utils import save_to_HDF5, batch, merge_parameter_sweep from radcad.core import generate_parameter_sweep", "SIMULATION_TIMESTEPS = 8758 #len(eth_price_df) - 1 MONTE_CARLO_RUNS = 1 sweeps = { 'controller_enabled':", "experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update) # Override parameters params_override = { 'liquidity_demand_enabled':", "= 1 sweeps = { 'controller_enabled': [True,False], } # Configure sweep and update", "state_variables from models.constants import RAY from experiments.system_model_v3.configure import configure_experiment from experiments.system_model_v3.run import run_experiment", "{ 'liquidity_demand_enabled': [False], } params.update(params_override) # Experiment details now = datetime.datetime.now() dir_path =", "import run_experiment from experiments.utils import save_to_HDF5, batch, merge_parameter_sweep from radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS", "'controller_enabled': [True,False], } # Configure sweep and update parameters params_update, experiment_metrics = configure_experiment(sweeps,", "parameters params_override = { 'liquidity_demand_enabled': [False], } params.update(params_override) # Experiment details now =", "experiments.system_model_v3.configure import configure_experiment from experiments.system_model_v3.run import run_experiment from experiments.utils import save_to_HDF5, batch, merge_parameter_sweep", "[True,False], } # Configure sweep and update parameters params_update, 
experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS,", "RAY from experiments.system_model_v3.configure import configure_experiment from experiments.system_model_v3.run import run_experiment from experiments.utils import save_to_HDF5,", "from experiments.utils import save_to_HDF5, batch, merge_parameter_sweep from radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS = 8758", "from radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS = 8758 #len(eth_price_df) - 1 MONTE_CARLO_RUNS = 1", "= __file__.split('.py')[0] results_id = now.isoformat() if __name__ == '__main__': run_experiment(results_id, experiment_folder, experiment_metrics, timesteps=SIMULATION_TIMESTEPS,", "import configure_experiment from experiments.system_model_v3.run import run_experiment from experiments.utils import save_to_HDF5, batch, merge_parameter_sweep from", "models.system_model_v3.model.state_variables.init import state_variables from models.constants import RAY from experiments.system_model_v3.configure import configure_experiment from experiments.system_model_v3.run", "import generate_parameter_sweep SIMULATION_TIMESTEPS = 8758 #len(eth_price_df) - 1 MONTE_CARLO_RUNS = 1 sweeps =", "8758 #len(eth_price_df) - 1 MONTE_CARLO_RUNS = 1 sweeps = { 'controller_enabled': [True,False], }", "import params from models.system_model_v3.model.state_variables.init import state_variables from models.constants import RAY from experiments.system_model_v3.configure import", "= now.isoformat() if __name__ == '__main__': run_experiment(results_id, experiment_folder, experiment_metrics, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS, params=params, initial_state=state_variables,", "} params.update(params_override) # Experiment details now = datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder =", "= datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder = 
__file__.split('.py')[0] results_id = now.isoformat() if __name__", "import state_variables from models.constants import RAY from experiments.system_model_v3.configure import configure_experiment from experiments.system_model_v3.run import", "from experiments.system_model_v3.configure import configure_experiment from experiments.system_model_v3.run import run_experiment from experiments.utils import save_to_HDF5, batch,", "generate_parameter_sweep SIMULATION_TIMESTEPS = 8758 #len(eth_price_df) - 1 MONTE_CARLO_RUNS = 1 sweeps = {", "from models.system_model_v3.model.params.init import params from models.system_model_v3.model.state_variables.init import state_variables from models.constants import RAY from", "import save_to_HDF5, batch, merge_parameter_sweep from radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS = 8758 #len(eth_price_df) -", "# Override parameters params_override = { 'liquidity_demand_enabled': [False], } params.update(params_override) # Experiment details", "parameters params_update, experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update) # Override parameters params_override =", "sweeps = { 'controller_enabled': [True,False], } # Configure sweep and update parameters params_update,", "experiment_folder = __file__.split('.py')[0] results_id = now.isoformat() if __name__ == '__main__': run_experiment(results_id, experiment_folder, experiment_metrics,", "Experiment details now = datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder = __file__.split('.py')[0] results_id =", "= os.path.dirname(os.path.realpath(__file__)) experiment_folder = __file__.split('.py')[0] results_id = now.isoformat() if __name__ == '__main__': run_experiment(results_id,", "sweep and update parameters params_update, experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) 
params.update(params_update) # Override", "Override parameters params_override = { 'liquidity_demand_enabled': [False], } params.update(params_override) # Experiment details now", "__file__.split('.py')[0] results_id = now.isoformat() if __name__ == '__main__': run_experiment(results_id, experiment_folder, experiment_metrics, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS,", "os from models.system_model_v3.model.params.init import params from models.system_model_v3.model.state_variables.init import state_variables from models.constants import RAY", "1 MONTE_CARLO_RUNS = 1 sweeps = { 'controller_enabled': [True,False], } # Configure sweep", "= configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update) # Override parameters params_override = { 'liquidity_demand_enabled': [False],", "radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS = 8758 #len(eth_price_df) - 1 MONTE_CARLO_RUNS = 1 sweeps", "now.isoformat() if __name__ == '__main__': run_experiment(results_id, experiment_folder, experiment_metrics, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS, params=params, initial_state=state_variables, save_file=True,", "run_experiment from experiments.utils import save_to_HDF5, batch, merge_parameter_sweep from radcad.core import generate_parameter_sweep SIMULATION_TIMESTEPS =", "and update parameters params_update, experiment_metrics = configure_experiment(sweeps, timesteps=SIMULATION_TIMESTEPS, runs=MONTE_CARLO_RUNS) params.update(params_update) # Override parameters", "dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder = __file__.split('.py')[0] results_id = now.isoformat() if __name__ == '__main__':", "params.update(params_update) # Override parameters params_override = { 'liquidity_demand_enabled': [False], } params.update(params_override) # Experiment", "MONTE_CARLO_RUNS = 1 sweeps = { 'controller_enabled': [True,False], } # Configure sweep and", "import os from 
models.system_model_v3.model.params.init import params from models.system_model_v3.model.state_variables.init import state_variables from models.constants import", "= 8758 #len(eth_price_df) - 1 MONTE_CARLO_RUNS = 1 sweeps = { 'controller_enabled': [True,False],", "params.update(params_override) # Experiment details now = datetime.datetime.now() dir_path = os.path.dirname(os.path.realpath(__file__)) experiment_folder = __file__.split('.py')[0]" ]
[ "+= 1 if pointA[1] < pointB[1] else -1 # 0 = x, 1", "in ascending order pointA, pointB = pointB, pointA for i in range(pointA[1], pointB[1]", "# 0 = x, 1 = y grid[target[1]][target[0]] += 1 while target[0] !=", "in range(GRID_SIZE)] for j in range (GRID_SIZE)] for row in data: pointA, pointB", "1 if pointA[0] < pointB[0] else -1 target[1] += 1 if pointA[1] <", "'__main__': data = open('input').read().splitlines() grid = [[0 for i in range(GRID_SIZE)] for j", "= open('input').read().splitlines() grid = [[0 for i in range(GRID_SIZE)] for j in range", "< pointB[1] else -1 # 0 = x, 1 = y grid[target[1]][target[0]] +=", "in ascending order if pointA[0] > pointB[0]: pointA, pointB = pointB, pointA for", "horizontal lines if pointA[0] == pointB[0]: if pointA[1] > pointB[1]: # swap points", "-1 target[1] += 1 if pointA[1] < pointB[1] else -1 # 0 =", "in pointB.split(',')] # only horizontal and vertical for now # horizontal lines if", "now # horizontal lines if pointA[0] == pointB[0]: if pointA[1] > pointB[1]: #", "for i in pointA.split(',')] pointB = [int(i) for i in pointB.split(',')] # only", "pointB, pointA target = pointA # 0 = x, 1 = y grid[target[1]][target[0]]", "-1 # 0 = x, 1 = y grid[target[1]][target[0]] += 1 count =", "pointB[0]: if pointA[1] > pointB[1]: # swap points to always draw in ascending", "to always draw in ascending order pointA, pointB = pointB, pointA for i", "vertical lines elif pointA[1] == pointB[1]: # swap points to always draw in", "else -1 target[1] += 1 if pointA[1] < pointB[1] else -1 # 0", "1 while target[0] != pointB[0] and target[1] != pointB[1]: target[0] += 1 if", "draw in ascending order pointA, pointB = pointB, pointA for i in range(pointA[1],", "+ 1): grid[pointA[1]][i] += 1 # diagonal lines else: if pointA[0] > pointB[0]", "pointB[1]: # swap points to always draw in ascending order if pointA[0] >", "pointB[1]: target[0] += 1 if pointA[0] < pointB[0] else -1 target[1] += 1", "grid[i][pointA[0]] += 1 # vertical lines elif pointA[1] 
== pointB[1]: # swap points", "pointB[0] else -1 target[1] += 1 if pointA[1] < pointB[1] else -1 #", "pointB[1]: # swap points to always draw in ascending order pointA, pointB =", "# horizontal lines if pointA[0] == pointB[0]: if pointA[1] > pointB[1]: # swap", "range(pointA[1], pointB[1] + 1): grid[i][pointA[0]] += 1 # vertical lines elif pointA[1] ==", "grid[target[1]][target[0]] += 1 while target[0] != pointB[0] and target[1] != pointB[1]: target[0] +=", "points to always draw in ascending order if pointA[0] > pointB[0]: pointA, pointB", "swap points to always draw in ascending order pointA, pointB = pointB, pointA", "python3 GRID_SIZE = 1000 if __name__ == '__main__': data = open('input').read().splitlines() grid =", "pointA.split(',')] pointB = [int(i) for i in pointB.split(',')] # only horizontal and vertical", "= y grid[target[1]][target[0]] += 1 count = 0 for row in grid: for", "always draw in ascending order pointA, pointB = pointB, pointA for i in", "y grid[target[1]][target[0]] += 1 while target[0] != pointB[0] and target[1] != pointB[1]: target[0]", "pointA for i in range(pointA[1], pointB[1] + 1): grid[i][pointA[0]] += 1 # vertical", "pointB[0] and target[1] != pointB[1]: target[0] += 1 if pointA[0] < pointB[0] else", "elif pointA[1] == pointB[1]: # swap points to always draw in ascending order", "+= 1 if pointA[0] < pointB[0] else -1 target[1] += 1 if pointA[1]", "pointA # 0 = x, 1 = y grid[target[1]][target[0]] += 1 while target[0]", "pointA[0] > pointB[0]: pointA, pointB = pointB, pointA for i in range(pointA[0], pointB[0]", "target[1] != pointB[1]: target[0] += 1 if pointA[0] < pointB[0] else -1 target[1]", "for i in range(pointA[1], pointB[1] + 1): grid[i][pointA[0]] += 1 # vertical lines", "1 = y grid[target[1]][target[0]] += 1 while target[0] != pointB[0] and target[1] !=", "in pointA.split(',')] pointB = [int(i) for i in pointB.split(',')] # only horizontal and", "= x, 1 = y grid[target[1]][target[0]] += 1 while target[0] != pointB[0] and", "-> 
') pointA = [int(i) for i in pointA.split(',')] pointB = [int(i) for", "range(pointA[0], pointB[0] + 1): grid[pointA[1]][i] += 1 # diagonal lines else: if pointA[0]", "else: if pointA[0] > pointB[0] or pointA[1] > pointB[1]: pointA, pointB = pointB,", "range (GRID_SIZE)] for row in data: pointA, pointB = row.split(' -> ') pointA", "swap points to always draw in ascending order if pointA[0] > pointB[0]: pointA,", "+= 1 while target[0] != pointB[0] and target[1] != pointB[1]: target[0] += 1", "in data: pointA, pointB = row.split(' -> ') pointA = [int(i) for i", "= [int(i) for i in pointA.split(',')] pointB = [int(i) for i in pointB.split(',')]", "pointB[0] or pointA[1] > pointB[1]: pointA, pointB = pointB, pointA target = pointA", "== pointB[1]: # swap points to always draw in ascending order if pointA[0]", "data: pointA, pointB = row.split(' -> ') pointA = [int(i) for i in", "pointB = pointB, pointA target = pointA # 0 = x, 1 =", "pointB = row.split(' -> ') pointA = [int(i) for i in pointA.split(',')] pointB", "pointA, pointB = pointB, pointA for i in range(pointA[1], pointB[1] + 1): grid[i][pointA[0]]", "== '__main__': data = open('input').read().splitlines() grid = [[0 for i in range(GRID_SIZE)] for", "= 0 for row in grid: for col in row: if col >=", "i in range(GRID_SIZE)] for j in range (GRID_SIZE)] for row in data: pointA,", "= pointB, pointA for i in range(pointA[1], pointB[1] + 1): grid[i][pointA[0]] += 1", "for i in range(GRID_SIZE)] for j in range (GRID_SIZE)] for row in data:", "pointA = [int(i) for i in pointA.split(',')] pointB = [int(i) for i in", "0 for row in grid: for col in row: if col >= 2:", "pointB[1] else -1 # 0 = x, 1 = y grid[target[1]][target[0]] += 1", "i in pointA.split(',')] pointB = [int(i) for i in pointB.split(',')] # only horizontal", "ascending order if pointA[0] > pointB[0]: pointA, pointB = pointB, pointA for i", "grid[pointA[1]][i] += 1 # diagonal lines else: if pointA[0] > pointB[0] or pointA[1]", "pointB = pointB, pointA for i in 
range(pointA[0], pointB[0] + 1): grid[pointA[1]][i] +=", "lines else: if pointA[0] > pointB[0] or pointA[1] > pointB[1]: pointA, pointB =", "or pointA[1] > pointB[1]: pointA, pointB = pointB, pointA target = pointA #", "pointA for i in range(pointA[0], pointB[0] + 1): grid[pointA[1]][i] += 1 # diagonal", "1 if pointA[1] < pointB[1] else -1 # 0 = x, 1 =", "!= pointB[0] and target[1] != pointB[1]: target[0] += 1 if pointA[0] < pointB[0]", "[int(i) for i in pointB.split(',')] # only horizontal and vertical for now #", "pointA, pointB = pointB, pointA for i in range(pointA[0], pointB[0] + 1): grid[pointA[1]][i]", "= pointA # 0 = x, 1 = y grid[target[1]][target[0]] += 1 while", "= y grid[target[1]][target[0]] += 1 while target[0] != pointB[0] and target[1] != pointB[1]:", "target[0] += 1 if pointA[0] < pointB[0] else -1 target[1] += 1 if", "grid[target[1]][target[0]] += 1 count = 0 for row in grid: for col in", "for row in grid: for col in row: if col >= 2: count", "range(GRID_SIZE)] for j in range (GRID_SIZE)] for row in data: pointA, pointB =", "< pointB[0] else -1 target[1] += 1 if pointA[1] < pointB[1] else -1", "and target[1] != pointB[1]: target[0] += 1 if pointA[0] < pointB[0] else -1", "row in grid: for col in row: if col >= 2: count +=", "> pointB[1]: # swap points to always draw in ascending order pointA, pointB", "= pointB, pointA target = pointA # 0 = x, 1 = y", "# diagonal lines else: if pointA[0] > pointB[0] or pointA[1] > pointB[1]: pointA,", "pointB, pointA for i in range(pointA[1], pointB[1] + 1): grid[i][pointA[0]] += 1 #", "> pointB[0]: pointA, pointB = pointB, pointA for i in range(pointA[0], pointB[0] +", "pointA, pointB = row.split(' -> ') pointA = [int(i) for i in pointA.split(',')]", "for i in pointB.split(',')] # only horizontal and vertical for now # horizontal", "order if pointA[0] > pointB[0]: pointA, pointB = pointB, pointA for i in", "+= 1 # vertical lines elif pointA[1] == pointB[1]: # swap points to", "in range (GRID_SIZE)] for row in 
data: pointA, pointB = row.split(' -> ')", "if pointA[0] > pointB[0] or pointA[1] > pointB[1]: pointA, pointB = pointB, pointA", "for i in range(pointA[0], pointB[0] + 1): grid[pointA[1]][i] += 1 # diagonal lines", "diagonal lines else: if pointA[0] > pointB[0] or pointA[1] > pointB[1]: pointA, pointB", "if pointA[1] > pointB[1]: # swap points to always draw in ascending order", "= [int(i) for i in pointB.split(',')] # only horizontal and vertical for now", "pointB, pointA for i in range(pointA[0], pointB[0] + 1): grid[pointA[1]][i] += 1 #", "row in data: pointA, pointB = row.split(' -> ') pointA = [int(i) for", "1 # vertical lines elif pointA[1] == pointB[1]: # swap points to always", "= [[0 for i in range(GRID_SIZE)] for j in range (GRID_SIZE)] for row", "points to always draw in ascending order pointA, pointB = pointB, pointA for", "i in range(pointA[0], pointB[0] + 1): grid[pointA[1]][i] += 1 # diagonal lines else:", "[int(i) for i in pointA.split(',')] pointB = [int(i) for i in pointB.split(',')] #", "pointB[0]: pointA, pointB = pointB, pointA for i in range(pointA[0], pointB[0] + 1):", "# 0 = x, 1 = y grid[target[1]][target[0]] += 1 count = 0", "+= 1 count = 0 for row in grid: for col in row:", "+ 1): grid[i][pointA[0]] += 1 # vertical lines elif pointA[1] == pointB[1]: #", "pointA[1] > pointB[1]: pointA, pointB = pointB, pointA target = pointA # 0", "x, 1 = y grid[target[1]][target[0]] += 1 while target[0] != pointB[0] and target[1]", "i in pointB.split(',')] # only horizontal and vertical for now # horizontal lines", "in range(pointA[1], pointB[1] + 1): grid[i][pointA[0]] += 1 # vertical lines elif pointA[1]", "# vertical lines elif pointA[1] == pointB[1]: # swap points to always draw", "only horizontal and vertical for now # horizontal lines if pointA[0] == pointB[0]:", "pointB = pointB, pointA for i in range(pointA[1], pointB[1] + 1): grid[i][pointA[0]] +=", "draw in ascending order if pointA[0] > pointB[0]: pointA, pointB = pointB, pointA", "else -1 # 0 
= x, 1 = y grid[target[1]][target[0]] += 1 count", "if pointA[0] > pointB[0]: pointA, pointB = pointB, pointA for i in range(pointA[0],", "> pointB[0] or pointA[1] > pointB[1]: pointA, pointB = pointB, pointA target =", "j in range (GRID_SIZE)] for row in data: pointA, pointB = row.split(' ->", "0 = x, 1 = y grid[target[1]][target[0]] += 1 count = 0 for", "pointB = [int(i) for i in pointB.split(',')] # only horizontal and vertical for", "> pointB[1]: pointA, pointB = pointB, pointA target = pointA # 0 =", "__name__ == '__main__': data = open('input').read().splitlines() grid = [[0 for i in range(GRID_SIZE)]", "for row in data: pointA, pointB = row.split(' -> ') pointA = [int(i)", "pointA[1] < pointB[1] else -1 # 0 = x, 1 = y grid[target[1]][target[0]]", "= row.split(' -> ') pointA = [int(i) for i in pointA.split(',')] pointB =", "always draw in ascending order if pointA[0] > pointB[0]: pointA, pointB = pointB,", "!= pointB[1]: target[0] += 1 if pointA[0] < pointB[0] else -1 target[1] +=", "1 = y grid[target[1]][target[0]] += 1 count = 0 for row in grid:", "1000 if __name__ == '__main__': data = open('input').read().splitlines() grid = [[0 for i", "y grid[target[1]][target[0]] += 1 count = 0 for row in grid: for col", "target[1] += 1 if pointA[1] < pointB[1] else -1 # 0 = x,", "pointA[0] < pointB[0] else -1 target[1] += 1 if pointA[1] < pointB[1] else", "GRID_SIZE = 1000 if __name__ == '__main__': data = open('input').read().splitlines() grid = [[0", "grid = [[0 for i in range(GRID_SIZE)] for j in range (GRID_SIZE)] for", "i in range(pointA[1], pointB[1] + 1): grid[i][pointA[0]] += 1 # vertical lines elif", "to always draw in ascending order if pointA[0] > pointB[0]: pointA, pointB =", "1 # diagonal lines else: if pointA[0] > pointB[0] or pointA[1] > pointB[1]:", "= x, 1 = y grid[target[1]][target[0]] += 1 count = 0 for row", "') pointA = [int(i) for i in pointA.split(',')] pointB = [int(i) for i", "in range(pointA[0], pointB[0] + 1): grid[pointA[1]][i] += 1 # 
diagonal lines else: if", "[[0 for i in range(GRID_SIZE)] for j in range (GRID_SIZE)] for row in", "== pointB[0]: if pointA[1] > pointB[1]: # swap points to always draw in", "1 count = 0 for row in grid: for col in row: if", "count = 0 for row in grid: for col in row: if col", "if pointA[0] < pointB[0] else -1 target[1] += 1 if pointA[1] < pointB[1]", "lines elif pointA[1] == pointB[1]: # swap points to always draw in ascending", "if __name__ == '__main__': data = open('input').read().splitlines() grid = [[0 for i in", "and vertical for now # horizontal lines if pointA[0] == pointB[0]: if pointA[1]", "order pointA, pointB = pointB, pointA for i in range(pointA[1], pointB[1] + 1):", "0 = x, 1 = y grid[target[1]][target[0]] += 1 while target[0] != pointB[0]", "pointA[0] > pointB[0] or pointA[1] > pointB[1]: pointA, pointB = pointB, pointA target", "in grid: for col in row: if col >= 2: count += 1", "if pointA[0] == pointB[0]: if pointA[1] > pointB[1]: # swap points to always", "= pointB, pointA for i in range(pointA[0], pointB[0] + 1): grid[pointA[1]][i] += 1", "if pointA[1] < pointB[1] else -1 # 0 = x, 1 = y", "grid: for col in row: if col >= 2: count += 1 print(f'{count}')", "= 1000 if __name__ == '__main__': data = open('input').read().splitlines() grid = [[0 for", "open('input').read().splitlines() grid = [[0 for i in range(GRID_SIZE)] for j in range (GRID_SIZE)]", "data = open('input').read().splitlines() grid = [[0 for i in range(GRID_SIZE)] for j in", "x, 1 = y grid[target[1]][target[0]] += 1 count = 0 for row in", "+= 1 # diagonal lines else: if pointA[0] > pointB[0] or pointA[1] >", "target = pointA # 0 = x, 1 = y grid[target[1]][target[0]] += 1", "horizontal and vertical for now # horizontal lines if pointA[0] == pointB[0]: if", "pointA[1] > pointB[1]: # swap points to always draw in ascending order pointA,", "ascending order pointA, pointB = pointB, pointA for i in range(pointA[1], pointB[1] +", "while target[0] != pointB[0] and target[1] != pointB[1]: 
target[0] += 1 if pointA[0]", "pointA, pointB = pointB, pointA target = pointA # 0 = x, 1", "for now # horizontal lines if pointA[0] == pointB[0]: if pointA[1] > pointB[1]:", "# only horizontal and vertical for now # horizontal lines if pointA[0] ==", "pointB[1] + 1): grid[i][pointA[0]] += 1 # vertical lines elif pointA[1] == pointB[1]:", "1): grid[pointA[1]][i] += 1 # diagonal lines else: if pointA[0] > pointB[0] or", "lines if pointA[0] == pointB[0]: if pointA[1] > pointB[1]: # swap points to", "# swap points to always draw in ascending order pointA, pointB = pointB,", "pointA[1] == pointB[1]: # swap points to always draw in ascending order if", "# swap points to always draw in ascending order if pointA[0] > pointB[0]:", "target[0] != pointB[0] and target[1] != pointB[1]: target[0] += 1 if pointA[0] <", "vertical for now # horizontal lines if pointA[0] == pointB[0]: if pointA[1] >", "row.split(' -> ') pointA = [int(i) for i in pointA.split(',')] pointB = [int(i)", "pointA[0] == pointB[0]: if pointA[1] > pointB[1]: # swap points to always draw", "#!/usr/bin/env python3 GRID_SIZE = 1000 if __name__ == '__main__': data = open('input').read().splitlines() grid", "1): grid[i][pointA[0]] += 1 # vertical lines elif pointA[1] == pointB[1]: # swap", "pointB[1]: pointA, pointB = pointB, pointA target = pointA # 0 = x,", "for j in range (GRID_SIZE)] for row in data: pointA, pointB = row.split('", "pointB[0] + 1): grid[pointA[1]][i] += 1 # diagonal lines else: if pointA[0] >", "pointB.split(',')] # only horizontal and vertical for now # horizontal lines if pointA[0]", "(GRID_SIZE)] for row in data: pointA, pointB = row.split(' -> ') pointA =", "pointA target = pointA # 0 = x, 1 = y grid[target[1]][target[0]] +=" ]
[ "get_ansible from get_helm import get_helm from get_skaffold import get_skaffold from get_docker import get_docker", "from c_registry import c_registry if __name__ == '__main__': edit_hosts() get_docker() # c_registry() #", "get_helm from get_skaffold import get_skaffold from get_docker import get_docker from get_minikube import get_minikube", "import c_registry if __name__ == '__main__': edit_hosts() get_docker() # c_registry() # get_ansible() #", "import edit_hosts from c_registry import c_registry if __name__ == '__main__': edit_hosts() get_docker() #", "get_minikube import get_minikube from get_regs import get_registries from edit_hosts import edit_hosts from c_registry", "__name__ == '__main__': edit_hosts() get_docker() # c_registry() # get_ansible() # #get_helm() # get_minikube()", "import get_helm from get_skaffold import get_skaffold from get_docker import get_docker from get_minikube import", "get_config import get_config from get_ansible import get_ansible from get_helm import get_helm from get_skaffold", "get_docker import get_docker from get_minikube import get_minikube from get_regs import get_registries from edit_hosts", "get_regs import get_registries from edit_hosts import edit_hosts from c_registry import c_registry if __name__", "# c_registry() # get_ansible() # #get_helm() # get_minikube() # get_skaffold() # get_config() get_registries()", "from get_regs import get_registries from edit_hosts import edit_hosts from c_registry import c_registry if", "'__main__': edit_hosts() get_docker() # c_registry() # get_ansible() # #get_helm() # get_minikube() # get_skaffold()", "import get_skaffold from get_docker import get_docker from get_minikube import get_minikube from get_regs import", "get_skaffold import get_skaffold from get_docker import get_docker from get_minikube import get_minikube from get_regs", "get_config from get_ansible import get_ansible from get_helm import get_helm from get_skaffold import get_skaffold", "from get_config import 
get_config from get_ansible import get_ansible from get_helm import get_helm from", "from get_skaffold import get_skaffold from get_docker import get_docker from get_minikube import get_minikube from", "from get_minikube import get_minikube from get_regs import get_registries from edit_hosts import edit_hosts from", "from get_docker import get_docker from get_minikube import get_minikube from get_regs import get_registries from", "from edit_hosts import edit_hosts from c_registry import c_registry if __name__ == '__main__': edit_hosts()", "get_registries from edit_hosts import edit_hosts from c_registry import c_registry if __name__ == '__main__':", "import get_config from get_ansible import get_ansible from get_helm import get_helm from get_skaffold import", "edit_hosts import edit_hosts from c_registry import c_registry if __name__ == '__main__': edit_hosts() get_docker()", "c_registry if __name__ == '__main__': edit_hosts() get_docker() # c_registry() # get_ansible() # #get_helm()", "edit_hosts() get_docker() # c_registry() # get_ansible() # #get_helm() # get_minikube() # get_skaffold() #", "from get_ansible import get_ansible from get_helm import get_helm from get_skaffold import get_skaffold from", "import get_docker from get_minikube import get_minikube from get_regs import get_registries from edit_hosts import", "from get_helm import get_helm from get_skaffold import get_skaffold from get_docker import get_docker from", "import get_ansible from get_helm import get_helm from get_skaffold import get_skaffold from get_docker import", "get_ansible import get_ansible from get_helm import get_helm from get_skaffold import get_skaffold from get_docker", "get_skaffold from get_docker import get_docker from get_minikube import get_minikube from get_regs import get_registries", "get_docker from get_minikube import get_minikube from get_regs import get_registries from edit_hosts import edit_hosts", "get_minikube from get_regs import get_registries from edit_hosts 
import edit_hosts from c_registry import c_registry", "edit_hosts from c_registry import c_registry if __name__ == '__main__': edit_hosts() get_docker() # c_registry()", "get_docker() # c_registry() # get_ansible() # #get_helm() # get_minikube() # get_skaffold() # get_config()", "import get_minikube from get_regs import get_registries from edit_hosts import edit_hosts from c_registry import", "get_helm import get_helm from get_skaffold import get_skaffold from get_docker import get_docker from get_minikube", "== '__main__': edit_hosts() get_docker() # c_registry() # get_ansible() # #get_helm() # get_minikube() #", "c_registry import c_registry if __name__ == '__main__': edit_hosts() get_docker() # c_registry() # get_ansible()", "import get_registries from edit_hosts import edit_hosts from c_registry import c_registry if __name__ ==", "if __name__ == '__main__': edit_hosts() get_docker() # c_registry() # get_ansible() # #get_helm() #", "c_registry() # get_ansible() # #get_helm() # get_minikube() # get_skaffold() # get_config() get_registries() edit_hosts()" ]
[ "Counter @pytest.fixture def weight_names(): return [ \"EventWeight\", # \"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ] @pytest.fixture", "\"JetWeight\", ] @pytest.fixture def counter(weight_names): return Counter(weight_names) def test_init(weight_names, full_wrapped_tree): c = Counter(weight_names)", "assert c.counts == (0, 0.0) assert c._w_counts == (0.0) def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree,", "c._weight_names == weight_names assert c.counts == (0, 0.0) assert c._w_counts == (0.0) def", "len(full_wrapped_tree) assert counter._w_counts == (n_events) assert counter.counts == (n_events, n_events) def test_add(counter, full_wrapped_tree):", "(n_events) assert counter.counts == (n_events, n_events) def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) n_events", "https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx((expected_weighted_sum * 2,), 2e-4) assert counter.counts == (n_events *", "pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False)", "* 2,), 2e-4) assert counter.counts == (n_events * 2, pytest.approx(expected_weighted_sum * 2, 2e-4))", "2e-4) assert counter.counts == (n_events * 2, pytest.approx(expected_weighted_sum * 2, 2e-4)) def test_increment_without_weights(full_wrapped_tree):", "\"EventWeight\", # \"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ] @pytest.fixture def counter(weight_names): return Counter(weight_names) def test_init(weight_names,", "small and due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4)", "full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) 
n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value", "assert c._weight_names == weight_names assert c.counts == (0, 0.0) assert c._w_counts == (0.0)", "from fast_carpenter.selection.filters import Counter @pytest.fixture def weight_names(): return [ \"EventWeight\", # \"MuonWeight\", \"ElectronWeight\",", "full_wrapped_tree): c = Counter(weight_names) assert c._weight_names == weight_names assert c.counts == (0, 0.0)", "@pytest.fixture def weight_names(): return [ \"EventWeight\", # \"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ] @pytest.fixture def", "import Counter @pytest.fixture def weight_names(): return [ \"EventWeight\", # \"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ]", "is_mc=True) n_events = len(full_wrapped_tree) with pytest.raises(IndexError): assert counter._w_counts[0] == n_events assert counter.counts ==", "2e-4)) def test_increment_without_weights(full_wrapped_tree): counter = Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) with pytest.raises(IndexError):", "2, 2e-4)) def test_increment_without_weights(full_wrapped_tree): counter = Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) with", "def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False) n_events = len(full_wrapped_tree) assert counter._w_counts == (n_events) assert", "taken from numpy sum, but awkward sum is used # the difference is", "test_init(weight_names, full_wrapped_tree): c = Counter(weight_names) assert c._weight_names == weight_names assert c.counts == (0,", "def weight_names(): return [ \"EventWeight\", # \"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ] @pytest.fixture def counter(weight_names):", "def test_increment_without_weights(full_wrapped_tree): counter = Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) with pytest.raises(IndexError): 
assert", "n_events = len(full_wrapped_tree) with pytest.raises(IndexError): assert counter._w_counts[0] == n_events assert counter.counts == (n_events,", "is_mc=False) n_events = len(full_wrapped_tree) assert counter._w_counts == (n_events) assert counter.counts == (n_events, n_events)", "as np import pytest from fast_carpenter.selection.filters import Counter @pytest.fixture def weight_names(): return [", "counter.add(counter) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value is taken from", "is_mc=True) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value is taken from", "is small and due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx((expected_weighted_sum", "counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value is", "import numpy as np import pytest from fast_carpenter.selection.filters import Counter @pytest.fixture def weight_names():", "= len(full_wrapped_tree) assert counter._w_counts == (n_events) assert counter.counts == (n_events, n_events) def test_add(counter,", "difference is small and due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts ==", "* 2, 2e-4)) def test_increment_without_weights(full_wrapped_tree): counter = Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree)", "is taken from numpy sum, but awkward sum is used # the difference", "numpy sum, but awkward sum is used # the difference is small and", "# see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts == (n_events, pytest.approx(expected_weighted_sum,", "229.94895935058594 # expected value is taken from numpy sum, but awkward sum 
is", "@pytest.fixture def counter(weight_names): return Counter(weight_names) def test_init(weight_names, full_wrapped_tree): c = Counter(weight_names) assert c._weight_names", "(n_events, n_events) def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) n_events = len(full_wrapped_tree) expected_weighted_sum =", "counter.counts == (n_events * 2, pytest.approx(expected_weighted_sum * 2, 2e-4)) def test_increment_without_weights(full_wrapped_tree): counter =", "2, pytest.approx(expected_weighted_sum * 2, 2e-4)) def test_increment_without_weights(full_wrapped_tree): counter = Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events", "sum is used # the difference is small and due to optimization #", "Counter(weight_names) def test_init(weight_names, full_wrapped_tree): c = Counter(weight_names) assert c._weight_names == weight_names assert c.counts", "= Counter(weight_names) assert c._weight_names == weight_names assert c.counts == (0, 0.0) assert c._w_counts", "assert c._w_counts == (0.0) def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) expected_weighted_sum", "== pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree,", "2,), 2e-4) assert counter.counts == (n_events * 2, pytest.approx(expected_weighted_sum * 2, 2e-4)) def", "def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 #", "== (n_events * 2, pytest.approx(expected_weighted_sum * 2, 2e-4)) def test_increment_without_weights(full_wrapped_tree): counter = Counter([])", "is small and due to optimization # see 
https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]),", "== (n_events) assert counter.counts == (n_events, n_events) def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter)", "see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx((expected_weighted_sum * 2,), 2e-4) assert counter.counts == (n_events", "counter = Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) with pytest.raises(IndexError): assert counter._w_counts[0] ==", "c = Counter(weight_names) assert c._weight_names == weight_names assert c.counts == (0, 0.0) assert", "# expected value is taken from numpy sum, but awkward sum is used", "expected_weighted_sum = 229.94895935058594 # expected value is taken from numpy sum, but awkward", "1e-4) assert counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False) n_events", "# see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx((expected_weighted_sum * 2,), 2e-4) assert counter.counts ==", "pytest from fast_carpenter.selection.filters import Counter @pytest.fixture def weight_names(): return [ \"EventWeight\", # \"MuonWeight\",", "value is taken from numpy sum, but awkward sum is used # the", "== (n_events, n_events) def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) n_events = len(full_wrapped_tree) expected_weighted_sum", "the difference is small and due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts", "used # the difference is small and due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241", "= len(full_wrapped_tree) expected_weighted_sum = 
229.94895935058594 # expected value is taken from numpy sum,", "pytest.approx((expected_weighted_sum * 2,), 2e-4) assert counter.counts == (n_events * 2, pytest.approx(expected_weighted_sum * 2,", "== (0.0) def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594", "and due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert", "weight_names assert c.counts == (0, 0.0) assert c._w_counts == (0.0) def test_increment_mc(counter, full_wrapped_tree):", "but awkward sum is used # the difference is small and due to", "to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx((expected_weighted_sum * 2,), 2e-4) assert", "weight_names(): return [ \"EventWeight\", # \"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ] @pytest.fixture def counter(weight_names): return", "n_events) def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594", "test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected", "= len(full_wrapped_tree) with pytest.raises(IndexError): assert counter._w_counts[0] == n_events assert counter.counts == (n_events, )", "optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx((expected_weighted_sum * 2,), 2e-4) assert counter.counts", "counter.increment(full_wrapped_tree, is_mc=False) n_events = len(full_wrapped_tree) assert counter._w_counts == (n_events) assert counter.counts == (n_events,", "test_increment_data(counter, full_wrapped_tree): 
counter.increment(full_wrapped_tree, is_mc=False) n_events = len(full_wrapped_tree) assert counter._w_counts == (n_events) assert counter.counts", "= Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) with pytest.raises(IndexError): assert counter._w_counts[0] == n_events", "and due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx((expected_weighted_sum * 2,),", "is_mc=True) counter.add(counter) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value is taken", "small and due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx((expected_weighted_sum *", "return [ \"EventWeight\", # \"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ] @pytest.fixture def counter(weight_names): return Counter(weight_names)", "to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts ==", "0.0) assert c._w_counts == (0.0) def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree)", "assert counter.counts == (n_events, n_events) def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) n_events =", "Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) with pytest.raises(IndexError): assert counter._w_counts[0] == n_events assert", "counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False) n_events = len(full_wrapped_tree)", "def counter(weight_names): return Counter(weight_names) def test_init(weight_names, full_wrapped_tree): c = Counter(weight_names) assert 
c._weight_names ==", "expected value is taken from numpy sum, but awkward sum is used #", "pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False) n_events = len(full_wrapped_tree) assert counter._w_counts ==", "(n_events * 2, pytest.approx(expected_weighted_sum * 2, 2e-4)) def test_increment_without_weights(full_wrapped_tree): counter = Counter([]) counter.increment(full_wrapped_tree,", "counter(weight_names): return Counter(weight_names) def test_init(weight_names, full_wrapped_tree): c = Counter(weight_names) assert c._weight_names == weight_names", "def test_init(weight_names, full_wrapped_tree): c = Counter(weight_names) assert c._weight_names == weight_names assert c.counts ==", "c._w_counts == (0.0) def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) expected_weighted_sum =", "counter._w_counts == (n_events) assert counter.counts == (n_events, n_events) def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True)", "numpy as np import pytest from fast_carpenter.selection.filters import Counter @pytest.fixture def weight_names(): return", "n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value is taken from numpy", "= 229.94895935058594 # expected value is taken from numpy sum, but awkward sum", "assert counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False) n_events =", "see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4))", "* 2, pytest.approx(expected_weighted_sum * 2, 2e-4)) def test_increment_without_weights(full_wrapped_tree): 
counter = Counter([]) counter.increment(full_wrapped_tree, is_mc=True)", "== (0, 0.0) assert c._w_counts == (0.0) def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events", "counter._w_counts == pytest.approx((expected_weighted_sum * 2,), 2e-4) assert counter.counts == (n_events * 2, pytest.approx(expected_weighted_sum", "pytest.approx(expected_weighted_sum * 2, 2e-4)) def test_increment_without_weights(full_wrapped_tree): counter = Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events =", "len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value is taken from numpy sum, but", "due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts", "return Counter(weight_names) def test_init(weight_names, full_wrapped_tree): c = Counter(weight_names) assert c._weight_names == weight_names assert", "def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected", "(0.0) def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 #", "\"ElectronWeight\", \"JetWeight\", ] @pytest.fixture def counter(weight_names): return Counter(weight_names) def test_init(weight_names, full_wrapped_tree): c =", "due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx((expected_weighted_sum * 2,), 2e-4)", "is used # the difference is small and due to optimization # see", "n_events = len(full_wrapped_tree) assert counter._w_counts == (n_events) assert counter.counts == (n_events, n_events) def", "[ \"EventWeight\", # \"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ] 
@pytest.fixture def counter(weight_names): return Counter(weight_names) def", "\"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ] @pytest.fixture def counter(weight_names): return Counter(weight_names) def test_init(weight_names, full_wrapped_tree): c", "assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter,", "counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter, full_wrapped_tree):", "full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False) n_events = len(full_wrapped_tree) assert counter._w_counts == (n_events) assert counter.counts ==", "== pytest.approx((expected_weighted_sum * 2,), 2e-4) assert counter.counts == (n_events * 2, pytest.approx(expected_weighted_sum *", "counter.counts == (n_events, n_events) def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) counter.add(counter) n_events = len(full_wrapped_tree)", "(n_events, pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False) n_events = len(full_wrapped_tree) assert counter._w_counts", "sum, but awkward sum is used # the difference is small and due", "Counter(weight_names) assert c._weight_names == weight_names assert c.counts == (0, 0.0) assert c._w_counts ==", "1e-4)) def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False) n_events = len(full_wrapped_tree) assert counter._w_counts == (n_events)", "test_increment_without_weights(full_wrapped_tree): counter = Counter([]) counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) with pytest.raises(IndexError): assert counter._w_counts[0]", "optimization # see 
https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts == (n_events,", "test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value", "counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) with pytest.raises(IndexError): assert counter._w_counts[0] == n_events assert counter.counts", "c.counts == (0, 0.0) assert c._w_counts == (0.0) def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True)", "== (n_events, pytest.approx(expected_weighted_sum, 1e-4)) def test_increment_data(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=False) n_events = len(full_wrapped_tree) assert", "np import pytest from fast_carpenter.selection.filters import Counter @pytest.fixture def weight_names(): return [ \"EventWeight\",", "full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value is", "(0, 0.0) assert c._w_counts == (0.0) def test_increment_mc(counter, full_wrapped_tree): counter.increment(full_wrapped_tree, is_mc=True) n_events =", "assert counter.counts == (n_events * 2, pytest.approx(expected_weighted_sum * 2, 2e-4)) def test_increment_without_weights(full_wrapped_tree): counter", "assert counter._w_counts == (n_events) assert counter.counts == (n_events, n_events) def test_add(counter, full_wrapped_tree): counter.increment(full_wrapped_tree,", "counter.increment(full_wrapped_tree, is_mc=True) n_events = len(full_wrapped_tree) expected_weighted_sum = 229.94895935058594 # expected value is taken", "] @pytest.fixture def counter(weight_names): return Counter(weight_names) def test_init(weight_names, full_wrapped_tree): c = Counter(weight_names) assert", 
"fast_carpenter.selection.filters import Counter @pytest.fixture def weight_names(): return [ \"EventWeight\", # \"MuonWeight\", \"ElectronWeight\", \"JetWeight\",", "assert counter._w_counts == pytest.approx((expected_weighted_sum * 2,), 2e-4) assert counter.counts == (n_events * 2,", "from numpy sum, but awkward sum is used # the difference is small", "# the difference is small and due to optimization # see https://github.com/scikit-hep/awkward-1.0/issues/1241 assert", "https://github.com/scikit-hep/awkward-1.0/issues/1241 assert counter._w_counts == pytest.approx(np.array([expected_weighted_sum]), 1e-4) assert counter.counts == (n_events, pytest.approx(expected_weighted_sum, 1e-4)) def", "import pytest from fast_carpenter.selection.filters import Counter @pytest.fixture def weight_names(): return [ \"EventWeight\", #", "# \"MuonWeight\", \"ElectronWeight\", \"JetWeight\", ] @pytest.fixture def counter(weight_names): return Counter(weight_names) def test_init(weight_names, full_wrapped_tree):", "awkward sum is used # the difference is small and due to optimization", "== weight_names assert c.counts == (0, 0.0) assert c._w_counts == (0.0) def test_increment_mc(counter," ]
[]
[ "sys from flask import Flask, jsonify from flask_cors import CORS from flask_migrate import", "environmenet variables from the passed in configuration file from the instance folder if", "'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, } for route in routes: blueprint = routes[route]", "create_app(testConfig=None, sqlConnectionString=None): # container and dependency injection configuration setup on controller level container", "flasgger import Swagger from alchemy.common.base import db from marshmallow import Schema, fields, ValidationError,", "# import tables here to be referenced in the alembic migration scripts from", "import Schema, fields, ValidationError, pre_load from controllers import tests_controller from container import Container", "if testConfig is None: # load the instance config, if it exists, when", "= Api(app) swagger = Swagger(app) # set up environmenet variables from the passed", "routes: blueprint = routes[route] app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix']) CORS(app, resources={r\"/*\": {\"origins\": \"*\"}}) app.config['CORS_HEADERS']", "import tables here to be referenced in the alembic migration scripts from alchemy.tables.test_defintion_table", "instance_relative_config=True) api = Api(app) swagger = Swagger(app) # set up environmenet variables from", "# load the instance config, if it exists, when not testing app.config.from_pyfile('config_dev.py', silent=False)", "Migrate from flask_restplus import Api from flasgger import Swagger from alchemy.common.base import db", "app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix']) CORS(app, resources={r\"/*\": {\"origins\": \"*\"}}) app.config['CORS_HEADERS'] = 'Content-Type' return app", "import CORS from flask_migrate import Migrate from flask_restplus import Api from flasgger import", "container and dependency injection configuration setup on controller level container = Container() 
container.wire(modules=[tests_controller])", "Container def create_app(testConfig=None, sqlConnectionString=None): # container and dependency injection configuration setup on controller", "create and configure the app app = Flask(__name__, instance_relative_config=True) api = Api(app) swagger", "tests_controller from container import Container def create_app(testConfig=None, sqlConnectionString=None): # container and dependency injection", "flask import Flask, jsonify from flask_cors import CORS from flask_migrate import Migrate from", "test config if passed in app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString", "it exists, when not testing app.config.from_pyfile('config_dev.py', silent=False) else: # load the test config", "instance folder if testConfig is None: # load the instance config, if it", "if passed in app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import", "marshmallow import Schema, fields, ValidationError, pre_load from controllers import tests_controller from container import", "render_as_batch=True) # Register blueprints routes = { 'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, }", "up environmenet variables from the passed in configuration file from the instance folder", "from flasgger import Swagger from alchemy.common.base import db from marshmallow import Schema, fields,", "in configuration file from the instance folder if testConfig is None: # load", "set up environmenet variables from the passed in configuration file from the instance", "config if passed in app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString #", "if sqlConnectionString is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables here 
to be referenced", "flask_restplus import Api from flasgger import Swagger from alchemy.common.base import db from marshmallow", "instance config, if it exists, when not testing app.config.from_pyfile('config_dev.py', silent=False) else: # load", "silent=False) if sqlConnectionString is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables here to be", "= Flask(__name__, instance_relative_config=True) api = Api(app) swagger = Swagger(app) # set up environmenet", "{ 'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, } for route in routes: blueprint =", "on controller level container = Container() container.wire(modules=[tests_controller]) # create and configure the app", "passed in configuration file from the instance folder if testConfig is None: #", "folder if testConfig is None: # load the instance config, if it exists,", "tables here to be referenced in the alembic migration scripts from alchemy.tables.test_defintion_table import", "here to be referenced in the alembic migration scripts from alchemy.tables.test_defintion_table import TestDefinition", "= Container() container.wire(modules=[tests_controller]) # create and configure the app app = Flask(__name__, instance_relative_config=True)", "TestDefinition db.init_app(app) migrate = Migrate(app, db, render_as_batch=True) # Register blueprints routes = {", "import Container def create_app(testConfig=None, sqlConnectionString=None): # container and dependency injection configuration setup on", "configuration setup on controller level container = Container() container.wire(modules=[tests_controller]) # create and configure", "the instance folder if testConfig is None: # load the instance config, if", "alchemy.common.base import db from marshmallow import Schema, fields, ValidationError, pre_load from controllers import", "alembic migration scripts from alchemy.tables.test_defintion_table import TestDefinition db.init_app(app) migrate = Migrate(app, 
db, render_as_batch=True)", "the test config if passed in app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString is not None:", "from alchemy.tables.test_defintion_table import TestDefinition db.init_app(app) migrate = Migrate(app, db, render_as_batch=True) # Register blueprints", "= Swagger(app) # set up environmenet variables from the passed in configuration file", "file from the instance folder if testConfig is None: # load the instance", "in app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables here", "from flask_cors import CORS from flask_migrate import Migrate from flask_restplus import Api from", "app.config.from_pyfile('config_dev.py', silent=False) else: # load the test config if passed in app.config.from_pyfile(testConfig, silent=False)", "not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables here to be referenced in the alembic", "import tests_controller from container import Container def create_app(testConfig=None, sqlConnectionString=None): # container and dependency", "import TestDefinition db.init_app(app) migrate = Migrate(app, db, render_as_batch=True) # Register blueprints routes =", "load the instance config, if it exists, when not testing app.config.from_pyfile('config_dev.py', silent=False) else:", "container = Container() container.wire(modules=[tests_controller]) # create and configure the app app = Flask(__name__,", "routes[route] app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix']) CORS(app, resources={r\"/*\": {\"origins\": \"*\"}}) app.config['CORS_HEADERS'] = 'Content-Type' return", "CORS from flask_migrate import Migrate from flask_restplus import Api from flasgger import Swagger", "'url_prefix': '/tests/'}, } for route in routes: blueprint = routes[route] app.register_blueprint(blueprint['route'], url_prefix =", "migration scripts from 
alchemy.tables.test_defintion_table import TestDefinition db.init_app(app) migrate = Migrate(app, db, render_as_batch=True) #", "import Api from flasgger import Swagger from alchemy.common.base import db from marshmallow import", "= routes[route] app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix']) CORS(app, resources={r\"/*\": {\"origins\": \"*\"}}) app.config['CORS_HEADERS'] = 'Content-Type'", "Swagger(app) # set up environmenet variables from the passed in configuration file from", "sqlConnectionString=None): # container and dependency injection configuration setup on controller level container =", "} for route in routes: blueprint = routes[route] app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix']) CORS(app,", "'/tests/'}, } for route in routes: blueprint = routes[route] app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix'])", "# container and dependency injection configuration setup on controller level container = Container()", "exists, when not testing app.config.from_pyfile('config_dev.py', silent=False) else: # load the test config if", "and configure the app app = Flask(__name__, instance_relative_config=True) api = Api(app) swagger =", "db from marshmallow import Schema, fields, ValidationError, pre_load from controllers import tests_controller from", "# create and configure the app app = Flask(__name__, instance_relative_config=True) api = Api(app)", "alchemy.tables.test_defintion_table import TestDefinition db.init_app(app) migrate = Migrate(app, db, render_as_batch=True) # Register blueprints routes", "fields, ValidationError, pre_load from controllers import tests_controller from container import Container def create_app(testConfig=None,", "level container = Container() container.wire(modules=[tests_controller]) # create and configure the app app =", "flask_cors import CORS from flask_migrate import Migrate from flask_restplus import Api from flasgger", "Register 
blueprints routes = { 'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, } for route", "# Register blueprints routes = { 'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, } for", "to be referenced in the alembic migration scripts from alchemy.tables.test_defintion_table import TestDefinition db.init_app(app)", "when not testing app.config.from_pyfile('config_dev.py', silent=False) else: # load the test config if passed", "Schema, fields, ValidationError, pre_load from controllers import tests_controller from container import Container def", "sqlConnectionString is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables here to be referenced in", "# set up environmenet variables from the passed in configuration file from the", "import Swagger from alchemy.common.base import db from marshmallow import Schema, fields, ValidationError, pre_load", "Flask(__name__, instance_relative_config=True) api = Api(app) swagger = Swagger(app) # set up environmenet variables", "in the alembic migration scripts from alchemy.tables.test_defintion_table import TestDefinition db.init_app(app) migrate = Migrate(app,", "{'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, } for route in routes: blueprint = routes[route] app.register_blueprint(blueprint['route'],", "Api from flasgger import Swagger from alchemy.common.base import db from marshmallow import Schema,", "dependency injection configuration setup on controller level container = Container() container.wire(modules=[tests_controller]) # create", "# load the test config if passed in app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString is", "def create_app(testConfig=None, sqlConnectionString=None): # container and dependency injection configuration setup on controller level", "= Migrate(app, db, render_as_batch=True) # Register blueprints routes = { 'tests': {'route': 
tests_controller.testsControllerBlueprint,", "scripts from alchemy.tables.test_defintion_table import TestDefinition db.init_app(app) migrate = Migrate(app, db, render_as_batch=True) # Register", "variables from the passed in configuration file from the instance folder if testConfig", "if it exists, when not testing app.config.from_pyfile('config_dev.py', silent=False) else: # load the test", "app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables here to", "from flask_restplus import Api from flasgger import Swagger from alchemy.common.base import db from", "route in routes: blueprint = routes[route] app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix']) CORS(app, resources={r\"/*\": {\"origins\":", "and dependency injection configuration setup on controller level container = Container() container.wire(modules=[tests_controller]) #", "swagger = Swagger(app) # set up environmenet variables from the passed in configuration", "coding:utf-8 import sys from flask import Flask, jsonify from flask_cors import CORS from", "from the instance folder if testConfig is None: # load the instance config,", "container import Container def create_app(testConfig=None, sqlConnectionString=None): # container and dependency injection configuration setup", "container.wire(modules=[tests_controller]) # create and configure the app app = Flask(__name__, instance_relative_config=True) api =", "api = Api(app) swagger = Swagger(app) # set up environmenet variables from the", "app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables here to be referenced in the alembic migration scripts", "db.init_app(app) migrate = Migrate(app, db, render_as_batch=True) # Register blueprints routes = { 'tests':", "be referenced in the alembic migration scripts from alchemy.tables.test_defintion_table import TestDefinition db.init_app(app) migrate", "configuration 
file from the instance folder if testConfig is None: # load the", "migrate = Migrate(app, db, render_as_batch=True) # Register blueprints routes = { 'tests': {'route':", "routes = { 'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, } for route in routes:", "is None: # load the instance config, if it exists, when not testing", "injection configuration setup on controller level container = Container() container.wire(modules=[tests_controller]) # create and", "for route in routes: blueprint = routes[route] app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix']) CORS(app, resources={r\"/*\":", "db, render_as_batch=True) # Register blueprints routes = { 'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'},", "silent=False) else: # load the test config if passed in app.config.from_pyfile(testConfig, silent=False) if", "from flask_migrate import Migrate from flask_restplus import Api from flasgger import Swagger from", "jsonify from flask_cors import CORS from flask_migrate import Migrate from flask_restplus import Api", "from flask import Flask, jsonify from flask_cors import CORS from flask_migrate import Migrate", "import db from marshmallow import Schema, fields, ValidationError, pre_load from controllers import tests_controller", "the app app = Flask(__name__, instance_relative_config=True) api = Api(app) swagger = Swagger(app) #", "blueprints routes = { 'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, } for route in", "Swagger from alchemy.common.base import db from marshmallow import Schema, fields, ValidationError, pre_load from", "configure the app app = Flask(__name__, instance_relative_config=True) api = Api(app) swagger = Swagger(app)", "passed in app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables", "in routes: blueprint 
= routes[route] app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix']) CORS(app, resources={r\"/*\": {\"origins\": \"*\"}})", "tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, } for route in routes: blueprint = routes[route] app.register_blueprint(blueprint['route'], url_prefix", "from alchemy.common.base import db from marshmallow import Schema, fields, ValidationError, pre_load from controllers", "else: # load the test config if passed in app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString", "from container import Container def create_app(testConfig=None, sqlConnectionString=None): # container and dependency injection configuration", "Container() container.wire(modules=[tests_controller]) # create and configure the app app = Flask(__name__, instance_relative_config=True) api", "blueprint = routes[route] app.register_blueprint(blueprint['route'], url_prefix = blueprint['url_prefix']) CORS(app, resources={r\"/*\": {\"origins\": \"*\"}}) app.config['CORS_HEADERS'] =", "import Flask, jsonify from flask_cors import CORS from flask_migrate import Migrate from flask_restplus", "testing app.config.from_pyfile('config_dev.py', silent=False) else: # load the test config if passed in app.config.from_pyfile(testConfig,", "# coding:utf-8 import sys from flask import Flask, jsonify from flask_cors import CORS", "app = Flask(__name__, instance_relative_config=True) api = Api(app) swagger = Swagger(app) # set up", "from marshmallow import Schema, fields, ValidationError, pre_load from controllers import tests_controller from container", "None: # load the instance config, if it exists, when not testing app.config.from_pyfile('config_dev.py',", "pre_load from controllers import tests_controller from container import Container def create_app(testConfig=None, sqlConnectionString=None): #", "config, if it exists, when not testing app.config.from_pyfile('config_dev.py', silent=False) else: # load the", "Migrate(app, db, 
render_as_batch=True) # Register blueprints routes = { 'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix':", "testConfig is None: # load the instance config, if it exists, when not", "Flask, jsonify from flask_cors import CORS from flask_migrate import Migrate from flask_restplus import", "None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables here to be referenced in the alembic migration", "controllers import tests_controller from container import Container def create_app(testConfig=None, sqlConnectionString=None): # container and", "import sys from flask import Flask, jsonify from flask_cors import CORS from flask_migrate", "from controllers import tests_controller from container import Container def create_app(testConfig=None, sqlConnectionString=None): # container", "app app = Flask(__name__, instance_relative_config=True) api = Api(app) swagger = Swagger(app) # set", "import Migrate from flask_restplus import Api from flasgger import Swagger from alchemy.common.base import", "the alembic migration scripts from alchemy.tables.test_defintion_table import TestDefinition db.init_app(app) migrate = Migrate(app, db,", "ValidationError, pre_load from controllers import tests_controller from container import Container def create_app(testConfig=None, sqlConnectionString=None):", "controller level container = Container() container.wire(modules=[tests_controller]) # create and configure the app app", "the passed in configuration file from the instance folder if testConfig is None:", "is not None: app.config['SQLALCHEMY_DATABASE_URI']=sqlConnectionString # import tables here to be referenced in the", "flask_migrate import Migrate from flask_restplus import Api from flasgger import Swagger from alchemy.common.base", "setup on controller level container = Container() container.wire(modules=[tests_controller]) # create and configure the", "the instance config, if it exists, when not testing 
app.config.from_pyfile('config_dev.py', silent=False) else: #", "referenced in the alembic migration scripts from alchemy.tables.test_defintion_table import TestDefinition db.init_app(app) migrate =", "Api(app) swagger = Swagger(app) # set up environmenet variables from the passed in", "= { 'tests': {'route': tests_controller.testsControllerBlueprint, 'url_prefix': '/tests/'}, } for route in routes: blueprint", "not testing app.config.from_pyfile('config_dev.py', silent=False) else: # load the test config if passed in", "load the test config if passed in app.config.from_pyfile(testConfig, silent=False) if sqlConnectionString is not", "from the passed in configuration file from the instance folder if testConfig is" ]
[ "= fiducial.get('Omega_m', None) self._Omega_de = fiducial.get('Omega_de', None) if not config.getboolean('old_growth_func', False): self.xi_growth =", "# Initialize the broadband and check # if we need to add or", "self._tracer2['type'] # Get rescaled Xi coordinates delta_rp = params.get(self._delta_rp_name, 0.) ap, at =", "---------- bb_term : dict broadband term config params : dict Computation parameters Returns", "# Calculate D1 in 100 values of z between 0 and zmax, then", "evol', 'standard') # Compute the bias evolution using the right model if 'croom'", "object used to turn Pk into Xi params : dict Computation parameters Returns", "bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) def compute_broadband(self, params, pos_type): \"\"\"Compute", "params, pos_type): \"\"\"Compute the broadband terms for one position (pre-distortion/post-distortion) and one type", "tracer2['type'] == 'discrete' and tracer1['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer2['name'] #", "a, args=pars)[0] D1 = interp1d(z, D1) growth = D1(z_grid) / D1(z_fid) return growth**2", "interp1d(z, D1) growth = D1(z_grid) / D1(z_fid) return growth**2 def _init_broadband(self, bb_config): \"\"\"Initialize", "self.broadband_sky(bb_term, params) if 'mul' in pos_type: corr = 1 + corr elif 'mul'", "None: corr = self.broadband(bb_term, params) if 'mul' in pos_type: corr = 1 +", "QSO radiation model \"\"\" assert 'QSO' in [self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name'] != self._tracer2['name']", "= self._z if z_fid is None: z_fid = self._z_fid if Omega_m is None:", "* (rt / sigma)**2) w = (rp >= 0.) 
& (rp < bb_term['bin_size_rp'])", "fiducial : dict fiducial config coords_grid : dict Dictionary with coordinate grid -", "1) corr = (bb_params[:, :, None, None] * r1**r1_powers[:, None, None] * r2**r2_powers[None,", "name bb['func'] = config['func'] bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) def", "Cosmology book. Returns ------- ND Array Growth factor \"\"\" # Check the defaults", "Add growth xi *= self.xi_growth # Add QSO radiation modeling for cross if", "Output broadband \"\"\" assert pos_type in ['pre-mul', 'pre-add', 'post-mul', 'post-add'] corr = None", "5. z = zmax * np.arange(nbins, dtype=float) / (nbins-1) D1 = np.zeros(nbins, dtype=float)", "= params['qso_rad_decrease'] # Compute the QSO radiation model xi_rad = strength / (r_shift**2)", "5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a, args=pars)[0] D1 = interp1d(z, D1) growth = D1(z_grid) /", "# Add relativistic effects if self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) # Add", "= utils.growth_function(z_grid, Omega_m, Omega_de) # Scale to the fiducial redshift growth /= utils.growth_function(z_fid,", "redshift, used for discrete tracers, by default 0. Returns ------- ND Array Rescaled", "index, config in enumerate(sky_broadbands): assert config['rp_rt'] == 'rp,rt' # Create the name for", "# Add QSO radiation modeling for cross if self.radiation_flag and not params['peak']: xi", "Check if we need delta rp (Only for the cross) self._delta_rp_name = None", ": ND array Array of radius coords of Xi mu : ND array", "nbins = 100 zmax = 5. 
z = zmax * np.arange(nbins, dtype=float) /", "of mu = rp/r coords of Xi ap : float Alpha parallel at", "'discrete' and tracer2['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer1['name'] elif tracer2['type'] ==", "def compute_broadband(self, params, pos_type): \"\"\"Compute the broadband terms for one position (pre-distortion/post-distortion) and", "the cross (QSOxLya)') # Check for relativistic effects and standard asymmetry self.relativistic_flag =", "+ z_fid) / (1. + z_grid) return growth**2 # Compute the growth at", "params : dict Computation parameters Returns ------- ND Array Bias evolution for tracer", "\"-\" + config['type']].append(bb) # Next pick up the sky broadban terms sky_broadbands =", "the fiducial redshift growth /= utils.growth_function(z_fid, Omega_m, Omega_de) return growth**2 def compute_growth_old(self, z_grid=None,", "= self._mu if bb_term['rp_rt'] == 'rp,rt': r1 = self._r / 100. * self._mu", "config['type']].append(bb) # Next pick up the sky broadban terms sky_broadbands = [el for", "z_fid=None, Omega_m=None, Omega_de=None): def hubble(z, Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3 + Omega_de + (1-Omega_m-Omega_de)*(1+z)**2)", "# Initialize the broadband self.has_bb = False if bb_config is not None: self._init_broadband(bb_config)", "None # Loop over the right pos/type configuration for bb_term in self.bb_terms[pos_type]: #", "mu \"\"\" mask = r != 0 rp = r[mask] * mu[mask] +", "'pre-add' or 'post-mul' or 'post-add' Returns ------- 1d Array Output broadband \"\"\" assert", "= scale_params self._metal_corr = metal_corr # Check if we need delta rp (Only", "Scale to the fiducial redshift growth /= utils.growth_function(z_fid, Omega_m, Omega_de) return growth**2 def", "broadband term dictionary bb = {} bb['name'] = name bb['func'] = config['func'] bb['rp_rt']", "return corr def compute_qso_radiation(self, params): \"\"\"Model the contribution of QSO radiation to the", "+ p1*(1. 
+ self._z)**2) / (p0 + p1 * (1 + self._z_eff)**2) return", "dict Dictionary with coordinate grid - r, mu, z scale_params : ScaleParameters ScaleParameters", "Calculate D1 in 100 values of z between 0 and zmax, then interpolate", "------- 1D Array Output xi relativistic \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert", "the transform object used to turn Pk into Xi params : dict Computation", "= [] # First pick up the normal broadband terms normal_broadbands = [el", "we need to add or multiply if corr is None: corr = self.broadband(bb_term,", "self._scale_params = scale_params self._metal_corr = metal_corr # Check if we need delta rp", "Implements eq. 7.77 from <NAME>'s Modern Cosmology book. Returns ------- ND Array Growth", "self._mu, ap, at, delta_rp) # Compute the correlation function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu,", "'discrete': self._delta_rp_name = 'drp_' + tracer2['name'] # Precompute growth self._z_fid = fiducial['z_fiducial'] self._Omega_m", "- self._mu**2) r_min, r_max, dr = bb_term['r_config'] mu_min, mu_max, dmu = bb_term['mu_config'] r1_powers", "self._z_eff) self._scale_params = scale_params self._metal_corr = metal_corr # Check if we need delta", "= rp/r coords of Xi ap : float Alpha parallel at : float", "tracer 1 tracer2 : dict Config of tracer 2 bb_config : list, optional", "el['func'] != 'broadband_sky'] for index, config in enumerate(normal_broadbands): # Create the name for", "Hankel transform of the input P(k), sums the necessary multipoles and rescales the", "corr elif 'mul' in pos_type: corr *= 1 + self.broadband(bb_term, params) else: corr", "import numpy as np from scipy.integrate import quad from scipy.interpolate import interp1d from", "/ lifetime + 1 / decrease)) return xi_rad def compute_xi_relativistic(self, pk, PktoXi_obj, params):", "can only be applied to the cross (QSOxLya)') # Check for relativistic effects", "corr += self.broadband(bb_term, params) else: # 
Initialize the broadband and check # if", "core of the correlation function. This does the Hankel transform of the input", "if Omega_de is None: Omega_de = self._Omega_de # Check if we have dark", "+ \"-\" + config['type']].append(bb) def compute_broadband(self, params, pos_type): \"\"\"Compute the broadband terms for", "growth = D1(z_grid) / D1(z_fid) return growth**2 def _init_broadband(self, bb_config): \"\"\"Initialize the broadband", "of QSO radiation to the cross (the transverse proximity effect) Parameters ---------- params", "core xi = self.compute_core(pk, PktoXi_obj, params) # Add bias evolution xi *= self.compute_bias_evol(params)", "at * rt rescaled_r = np.zeros(len(r)) rescaled_mu = np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2 +", "(1 + self._z_eff)**2) return bias_z def compute_growth(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute growth", "Compute the QSO radiation model xi_rad = strength / (r_shift**2) * (1 -", "\"\"\" self._config = config self._r = coords_grid['r'] self._mu = coords_grid['mu'] self._z = coords_grid['z']", "pk_lin, PktoXi_obj, params): \"\"\"Compute correlation function for input P(k). Parameters ---------- pk :", "if z_grid is None: z_grid = self._z if z_fid is None: z_fid =", "only called once # ! Compute is called many times and should be", "[self._tracer1['name'], self._tracer2['name']] if not ('QSO' in names and 'LYA' in names): raise ValueError('You", "growth self._z_fid = fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m', None) self._Omega_de = fiducial.get('Omega_de', None) if", "pos_type : string String with position and type, must be one of: 'pre-mul'", "delta_rp = params.get(self._delta_rp_name, 0.) rp = self._r * self._mu + delta_rp rt =", "broadband self.has_bb = False if bb_config is not None: self._init_broadband(bb_config) self.has_bb = True", "\"\"\"Compute sky broadband term. 
Calculates a Gaussian broadband in rp,rt for the sky", "config in enumerate(sky_broadbands): assert config['rp_rt'] == 'rp,rt' # Create the name for the", "asymmetry' in self._config: self.asymmetry_flag = self._config.getboolean('standard asymmetry') if self.relativistic_flag or self.asymmetry_flag: types =", "residuals. Parameters ---------- bb_term : dict broadband term config params : dict Computation", "broadband_sky(self, bb_term, params): \"\"\"Compute sky broadband term. Calculates a Gaussian broadband in rp,rt", "params, tracer_name): \"\"\"Compute tracer bias evolution. Parameters ---------- params : dict Computation parameters", "Get rescaled Xi coordinates delta_rp = params.get(self._delta_rp_name, 0.) ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr)", "= ap * rp rescaled_rt = at * rt rescaled_r = np.zeros(len(r)) rescaled_mu", "dict broadband term config params : dict Computation parameters Returns ------- 1d Array", "rp = self._r * self._mu + delta_rp rt = self._r * np.sqrt(1 -", "params.get(self._delta_rp_name, 0.) rp = self._r * self._mu + delta_rp rt = self._r *", "__init__(self, config, fiducial, coords_grid, scale_params, tracer1, tracer2, bb_config=None, metal_corr=False): \"\"\" Parameters ---------- config", "discrete tracers, by default 0. Returns ------- ND Array Rescaled radii ND Array", "the correlation function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params) return xi_rel def compute_xi_asymmetry(self,", "r and mu grids delta_rp = params.get(self._delta_rp_name, 0.) rp = self._r * self._mu", "corr is None: corr = self.broadband_sky(bb_term, params) if 'mul' in pos_type: corr =", "0.) 
# Get rescaled Xi coordinates ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu", "PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole) return xi @staticmethod def _rescale_coords(r, mu, ap, at, delta_rp=0.):", "None: Omega_de = self._Omega_de # Check if we have dark energy if Omega_de", "r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'], i, j)]) bb_params = np.array(bb_params).reshape(-1, r_max - r_min +", "string Tracer name Returns ------- ND Array Bias evolution for tracer \"\"\" p0", "is None: z_fid = self._z_fid if Omega_m is None: Omega_m = self._Omega_m if", "r_max, dr = bb_term['r_config'] mu_min, mu_max, dmu = bb_term['mu_config'] r1_powers = np.arange(r_min, r_max", "from outside \"\"\" def __init__(self, config, fiducial, coords_grid, scale_params, tracer1, tracer2, bb_config=None, metal_corr=False):", "'post-mul', 'post-add'] corr = None # Loop over the right pos/type configuration for", "pk, self._multipole) return xi @staticmethod def _rescale_coords(r, mu, ap, at, delta_rp=0.): \"\"\"Rescale Xi", "Loop over the right pos/type configuration for bb_term in self.bb_terms[pos_type]: # Check if", "Create the name for the parameters of this term name = 'BB-{}-{} {}", "\"\"\"Compute bias evolution for the correlation function. Parameters ---------- params : dict Computation", "the correlation function. Parameters ---------- params : dict Computation parameters Returns ------- ND", "Omega_de is None: growth = (1 + z_fid) / (1. + z_grid) return", "None, None] * r1**r1_powers[:, None, None] * r2**r2_powers[None, :, None]).sum(axis=(0, 1, 2)) return", "Returns ------- 1D Array Output xi asymmetry \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']]", "ND Array Input power spectrum pk_lin : 1D Array Linear isotropic power spectrum", "params.get(self._delta_rp_name, 0.) 
ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap,", "corr elif 'mul' in pos_type: corr *= 1 + self.broadband_sky(bb_term, params) else: corr", "scale / (sigma * np.sqrt(2. * np.pi)) corr *= np.exp(-0.5 * (rt /", "= params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime'] decrease = params['qso_rad_decrease'] # Compute the QSO radiation", "not in types) or (types[0] == types[1]): raise ValueError('You asked for relativistic effects", "= config['func'] bb['rp_rt'] = config['rp_rt'] bb['r_config'] = config['r_config'] bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre'] +", "= {} bb['name'] = name bb['func'] = config['func'] bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre'] +", "for delta rp delta_rp = 0. if self._delta_rp_name is not None: delta_rp =", "dict Config of tracer 2 bb_config : list, optional list with configs of", ": dict Computation parameters Returns ------- 1d Array Output broadband \"\"\" rp =", "z_grid) return growth**2 # Compute the growth at each redshift on the grid", "== 'discrete' and tracer2['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer1['name'] elif tracer2['type']", "delta_rp = params.get(self._delta_rp_name, 0.) ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r,", "self._mu rt = self._r * np.sqrt(1 - self._mu**2) scale = params[bb_term['name'] + '-scale-sky']", "/ (sigma * np.sqrt(2. * np.pi)) corr *= np.exp(-0.5 * (rt / sigma)**2)", "rt = self._r * np.sqrt(1 - self._mu**2) scale = params[bb_term['name'] + '-scale-sky'] sigma", "class CorrelationFunction: \"\"\"Correlation function computation and handling. # ! 
Slow operations should be", "Add standard asymmetry if self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return xi def", "cross (QSOxLya)') # Check for relativistic effects and standard asymmetry self.relativistic_flag = False", "dr) r2_powers = np.arange(mu_min, mu_max + 1, dmu) bb_params = [] for i", "self._mu r2 = self._r / 100. * np.sqrt(1 - self._mu**2) r_min, r_max, dr", "PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from relativistic effects (Bonvin et al. 2014).", "utils.growth_function(z_grid, Omega_m, Omega_de) # Scale to the fiducial redshift growth /= utils.growth_function(z_fid, Omega_m,", "delta rp delta_rp = 0. if self._delta_rp_name is not None: delta_rp = params.get(self._delta_rp_name,", "self._tracer1['type'] != self._tracer2['type'] # Get rescaled Xi coordinates delta_rp = params.get(self._delta_rp_name, 0.) ap,", "or multiply if corr is None: corr = self.broadband_sky(bb_term, params) if 'mul' in", "self.compute_bias_evol(params) # Add growth xi *= self.xi_growth # Add QSO radiation modeling for", "Returns ------- ND Array Bias evolution for tracer \"\"\" # Compute the bias", "corr = scale / (sigma * np.sqrt(2. * np.pi)) corr *= np.exp(-0.5 *", "this work for the QSO auto as well? self.radiation_flag = False if 'radiation", "tracer_name): \"\"\"Bias evolution standard model. Parameters ---------- params : dict Computation parameters tracer_name", "self._metal_corr = metal_corr # Check if we need delta rp (Only for the", "enumerate(sky_broadbands): assert config['rp_rt'] == 'rp,rt' # Create the name for the parameters of", "(p0 + p1 * (1 + self._z_eff)**2) return bias_z def compute_growth(self, z_grid=None, z_fid=None,", "D1(z_grid) / D1(z_fid) return growth**2 def _init_broadband(self, bb_config): \"\"\"Initialize the broadband terms. Parameters", "and handling. # ! 
Slow operations should be kept in init as that", "1/a-1 return 1./(a*hubble(z, Omega_m, Omega_de))**3 # Calculate D1 in 100 values of z", "[self._tracer1['type'], self._tracer2['type']] if ('continuous' not in types) or (types[0] == types[1]): raise ValueError('You", "parameters Returns ------- ND Array Bias evolution for tracer \"\"\" # Compute the", "[] self.bb_terms['post-add'] = [] self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul'] = [] # First pick", "is None: corr = self.broadband_sky(bb_term, params) if 'mul' in pos_type: corr = 1", "== 'rp,rt': r1 = self._r / 100. * self._mu r2 = self._r /", "broadband term dictionary bb = {} bb['name'] = name bb['func'] = config['func'] bb['bin_size_rp']", "pos_type: corr = 1 + corr elif 'mul' in pos_type: corr *= 1", "rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute the correlation function xi_asy", "= self._Omega_de # Check if we have dark energy if Omega_de is None:", "dict fiducial config coords_grid : dict Dictionary with coordinate grid - r, mu,", "index, config['type'], config['pre'], config['rp_rt']) # Create the broadband term dictionary bb = {}", "the sky broadban terms sky_broadbands = [el for el in bb_config if el['func']", "---------- params : dict Computation parameters tracer_name : string Name of tracer Returns", "to add or multiply if corr is None: corr = self.broadband_sky(bb_term, params) if", "parameters Returns ------- 1D Array Output xi asymmetry \"\"\" assert 'continuous' in [self._tracer1['type'],", "a power-law broadband in r and mu or rp,rt. 
Parameters ---------- bb_term :", "and tracer1['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer2['name'] # Precompute growth self._z_fid", "Array Input power spectrum PktoXi_obj : vega.PktoXi An instance of the transform object", "1 / decrease)) return xi_rad def compute_xi_relativistic(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation", "z = zmax * np.arange(nbins, dtype=float) / (nbins-1) D1 = np.zeros(nbins, dtype=float) pars", "ap/at. Parameters ---------- r : ND array Array of radius coords of Xi", "= (Omega_m, Omega_de) for i in range(nbins): a = 1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i],", "params) # Add standard asymmetry if self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return", "in names): raise ValueError('You asked for QSO radiation effects, but it' ' can", "if bb_term['rp_rt'] == 'rp,rt': r1 = self._r / 100. * self._mu r2 =", "broadband if bb_term['func'] != 'broadband_sky': # Initialize the broadband and check # if", "mu_min, mu_max, dmu = bb_term['mu_config'] r1_powers = np.arange(r_min, r_max + 1, dr) r2_powers", "!= 0 rp = r[mask] * mu[mask] + delta_rp rt = r[mask] *", "range(nbins): a = 1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a, args=pars)[0] D1 =", "Computation parameters Returns ------- 1d Array Output broadband \"\"\" rp = self._r *", "parameters Returns ------- 1D Array Output correlation function \"\"\" # Compute the core", "------- 1D Array Output correlation function \"\"\" # Compute the core xi =", "def compute_qso_radiation(self, params): \"\"\"Model the contribution of QSO radiation to the cross (the", "np.sqrt(1 - self._mu**2) r_shift = np.sqrt(rp**2 + rt**2) mu_shift = rp / r_shift", "pos_type: corr = 1. else: corr = 0. return corr def broadband_sky(self, bb_term,", "one position (pre-distortion/post-distortion) and one type (multiplicative/additive). Parameters ---------- params : dict Computation", "Croom model for QSO, see Croom et al. 2005. 
Parameters ---------- params :", "\"\"\"Compute the broadband terms for one position (pre-distortion/post-distortion) and one type (multiplicative/additive). Parameters", "rescaled_mu = np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask] = rescaled_rp / rescaled_r[mask]", "multiply if corr is None: corr = self.broadband(bb_term, params) if 'mul' in pos_type:", "in rp,rt for the sky residuals. Parameters ---------- bb_term : dict broadband term", "np.pi)) corr *= np.exp(-0.5 * (rt / sigma)**2) w = (rp >= 0.)", "string Tracer name Returns ------- ND Array Bias evolution for tracer \"\"\" assert", "Array Bias evolution for tracer \"\"\" assert tracer_name == \"QSO\" p0 = params[\"croom_par0\"]", "(1. + z_grid) return growth**2 # Compute the growth at each redshift on", "self._tracer1 = tracer1 self._tracer2 = tracer2 self._z_eff = fiducial['z_eff'] self._rel_z_evol = (1. +", "work for the QSO auto as well? self.radiation_flag = False if 'radiation effects'", "* self._mu r2 = self._r / 100. 
* np.sqrt(1 - self._mu**2) r_min, r_max,", "coords_grid['r'] self._mu = coords_grid['mu'] self._z = coords_grid['z'] self._multipole = config.getint('single_multipole', -1) self._tracer1 =", "Omega_de = self._Omega_de # Check if we have dark energy if Omega_de is", "need to add or multiply if corr is None: corr = self.broadband(bb_term, params)", "None: corr = self.broadband_sky(bb_term, params) if 'mul' in pos_type: corr = 1 +", "= False if bb_config is not None: self._init_broadband(bb_config) self.has_bb = True # Check", "default None metal_corr : bool, optional Whether this is a metal correlation, by", "bias evolution using the right model if 'croom' in evol_model: bias_evol = self._bias_evol_croom(params,", "'standard') else: evol_model = self._config.get('z evol', 'standard') # Compute the bias evolution using", "many times and should be fast Extensions should have their separate method of", "is None: corr = self.broadband(bb_term, params) if 'mul' in pos_type: corr = 1", "'drp_' + tracer2['name'] # Precompute growth self._z_fid = fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m', None)", "r2 = self._mu if bb_term['rp_rt'] == 'rp,rt': r1 = self._r / 100. *", "'broadband_sky'] for index, config in enumerate(normal_broadbands): # Create the name for the parameters", "xi += self.compute_qso_radiation(params) # Add relativistic effects if self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj,", "ND Array Rescaled mu \"\"\" mask = r != 0 rp = r[mask]", "is not None: delta_rp = params.get(self._delta_rp_name, 0.) # Get rescaled Xi coordinates ap,", "D1(z_fid) return growth**2 def _init_broadband(self, bb_config): \"\"\"Initialize the broadband terms. 
Parameters ---------- bb_config", "# Get the QSO radiation model parameters strength = params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry']", "{} {} {}'.format(config['cf_name'], index, config['type'], config['pre'], config['rp_rt']) # Create the broadband term dictionary", "r_min + 1) corr = (bb_params[:, :, None, None] * r1**r1_powers[:, None, None]", "name = 'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands), config['func']) # Create the broadband term dictionary", "self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute correlation function xi = PktoXi_obj.compute(rescaled_r, rescaled_mu,", "corr = None # Loop over the right pos/type configuration for bb_term in", "---------- pk : ND Array Input power spectrum pk_lin : 1D Array Linear", "r and mu or rp,rt. Parameters ---------- bb_term : dict broadband term config", "- nuisance correction for wrong redshift, used for discrete tracers, by default 0.", "1, dmu) bb_params = [] for i in r1_powers: for j in r2_powers:", "parameters strength = params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime'] decrease = params['qso_rad_decrease']", "= [self._tracer1['name'], self._tracer2['name']] if not ('QSO' in names and 'LYA' in names): raise", "= np.zeros(len(r)) rescaled_mu = np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask] = rescaled_rp", "self._get_tracer_evol(params, self._tracer2['name']) return bias_evol def _get_tracer_evol(self, params, tracer_name): \"\"\"Compute tracer bias evolution. 
Parameters", "An instance of the transform object used to turn Pk into Xi params", "self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) def compute_broadband(self, params, pos_type): \"\"\"Compute the broadband terms", "fiducial.get('Omega_m', None) self._Omega_de = fiducial.get('Omega_de', None) if not config.getboolean('old_growth_func', False): self.xi_growth = self.compute_growth(self._z,", "bias evolution xi *= self.compute_bias_evol(params) # Add growth xi *= self.xi_growth # Add", "must be one of: 'pre-mul' or 'pre-add' or 'post-mul' or 'post-add' Returns -------", "not None: delta_rp = params.get(self._delta_rp_name, 0.) # Get rescaled Xi coordinates ap, at", "if tracer1['type'] == 'discrete' and tracer2['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer1['name']", "' but they only work for the cross') def compute(self, pk, pk_lin, PktoXi_obj,", "def hubble(z, Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3 + Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m,", "+ delta_rp rt = self._r * np.sqrt(1 - self._mu**2) r_shift = np.sqrt(rp**2 +", "position and type, must be one of: 'pre-mul' or 'pre-add' or 'post-mul' or", "book. 
Returns ------- ND Array Growth factor \"\"\" # Check the defaults if", "* (1 - mu_shift**2)) xi_rad *= np.exp(-r_shift * ((1 + mu_shift) / lifetime", "outside \"\"\" def __init__(self, config, fiducial, coords_grid, scale_params, tracer1, tracer2, bb_config=None, metal_corr=False): \"\"\"", "'post-add'] corr = None # Loop over the right pos/type configuration for bb_term", ": string Tracer name Returns ------- ND Array Bias evolution for tracer \"\"\"", "rescaled_rt**2) rescaled_mu[mask] = rescaled_rp / rescaled_r[mask] return rescaled_r, rescaled_mu def compute_bias_evol(self, params): \"\"\"Compute", "self._config.get('z evol', 'standard') # Compute the bias evolution using the right model if", "\"\"\"Model the contribution of QSO radiation to the cross (the transverse proximity effect)", "defaults if corr is still None if corr is None: if 'mul' in", "!= 'discrete': self._delta_rp_name = 'drp_' + tracer1['name'] elif tracer2['type'] == 'discrete' and tracer1['type']", "(types[0] == types[1]): raise ValueError('You asked for relativistic effects or standard asymmetry,' '", "if self.radiation_flag: names = [self._tracer1['name'], self._tracer2['name']] if not ('QSO' in names and 'LYA'", "for el in bb_config if el['func'] != 'broadband_sky'] for index, config in enumerate(normal_broadbands):", "a Gaussian broadband in rp,rt for the sky residuals. Parameters ---------- bb_term :", "decrease = params['qso_rad_decrease'] # Compute the QSO radiation model xi_rad = strength /", "coords of Xi ap : float Alpha parallel at : float Alpha transverse", "bool, optional Whether this is a metal correlation, by default False \"\"\" self._config", "bb_config): \"\"\"Initialize the broadband terms. 
Parameters ---------- bb_config : list list with configs", "Array Output correlation function \"\"\" # Compute the core xi = self.compute_core(pk, PktoXi_obj,", "[] for i in r1_powers: for j in r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'], i,", "-1) self._tracer1 = tracer1 self._tracer2 = tracer2 self._z_eff = fiducial['z_eff'] self._rel_z_evol = (1.", "D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a, args=pars)[0] D1 = interp1d(z, D1) growth =", "(p0 + p1*(1. + self._z)**2) / (p0 + p1 * (1 + self._z_eff)**2)", "1D Array Output xi relativistic \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type']", "config file fiducial : dict fiducial config coords_grid : dict Dictionary with coordinate", "one type (multiplicative/additive). Parameters ---------- params : dict Computation parameters pos_type : string", "assert 'QSO' in [self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name'] != self._tracer2['name'] # Compute the shifted", "self._config.get(handle_name, 'standard') else: evol_model = self._config.get('z evol', 'standard') # Compute the bias evolution", "standard asymmetry,' ' but they only work for the cross') def compute(self, pk,", "using the right model if 'croom' in evol_model: bias_evol = self._bias_evol_croom(params, tracer_name) else:", "we need to add or multiply if corr is None: corr = self.broadband_sky(bb_term,", "metal_corr : bool, optional Whether this is a metal correlation, by default False", "to turn Pk into Xi params : dict Computation parameters Returns ------- 1D", "dtype=float) / (nbins-1) D1 = np.zeros(nbins, dtype=float) pars = (Omega_m, Omega_de) for i", "in pos_type: corr = 1 + corr elif 'mul' in pos_type: corr *=", "Omega_de): return np.sqrt(Omega_m*(1+z)**3 + Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m, Omega_de): z =", "effects if self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) 
# Add standard asymmetry if", "for i in r1_powers: for j in r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'], i, j)])", "self.relativistic_flag = self._config.getboolean('relativistic correction') self.asymmetry_flag = False if 'standard asymmetry' in self._config: self.asymmetry_flag", "the bias evolution using the right model if 'croom' in evol_model: bias_evol =", "float Alpha parallel at : float Alpha transverse delta_rp : float, optional Delta", "= coords_grid['r'] self._mu = coords_grid['mu'] self._z = coords_grid['z'] self._multipole = config.getint('single_multipole', -1) self._tracer1", "corr def broadband(self, bb_term, params): \"\"\"Compute broadband term. Calculates a power-law broadband in", "defaults if z_grid is None: z_grid = self._z if z_fid is None: z_fid", ": string String with position and type, must be one of: 'pre-mul' or", "Xi ap : float Alpha parallel at : float Alpha transverse delta_rp :", "= fiducial['z_eff'] self._rel_z_evol = (1. + self._z) / (1 + self._z_eff) self._scale_params =", "Returns ------- 1d Array Output broadband \"\"\" r1 = self._r / 100. r2", "z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute growth factor. Implements eq. 
7.77 from <NAME>'s Modern Cosmology", "= self._rel_z_evol**p0 return bias_z def _bias_evol_croom(self, params, tracer_name): \"\"\"Bias evolution Croom model for", "params) return xi def compute_core(self, pk, PktoXi_obj, params): \"\"\"Compute the core of the", "Array Output xi relativistic \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] !=", "params : dict Computation parameters Returns ------- 1D Xi QSO radiation model \"\"\"", "'QSO' in [self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name'] != self._tracer2['name'] # Compute the shifted r", "'drp_' + tracer1['name'] elif tracer2['type'] == 'discrete' and tracer1['type'] != 'discrete': self._delta_rp_name =", "# Compute the core xi = self.compute_core(pk, PktoXi_obj, params) # Add bias evolution", "Slow operations should be kept in init as that is only called once", "Array Linear isotropic power spectrum PktoXi_obj : vega.PktoXi An instance of the transform", "still None if corr is None: if 'mul' in pos_type: corr = 1.", "np.sqrt(1 - self._mu**2) r_min, r_max, dr = bb_term['r_config'] mu_min, mu_max, dmu = bb_term['mu_config']", "mu_max, dmu = bb_term['mu_config'] r1_powers = np.arange(r_min, r_max + 1, dr) r2_powers =", "0.) rp = self._r * self._mu + delta_rp rt = self._r * np.sqrt(1", "is called many times and should be fast Extensions should have their separate", "= D1(z_grid) / D1(z_fid) return growth**2 def _init_broadband(self, bb_config): \"\"\"Initialize the broadband terms.", "the cross (the transverse proximity effect) Parameters ---------- params : dict Computation parameters", "dict Computation parameters Returns ------- 1D Array Output correlation function \"\"\" # Check", "tracer1 self._tracer2 = tracer2 self._z_eff = fiducial['z_eff'] self._rel_z_evol = (1. + self._z) /", "+ self.broadband(bb_term, params) else: corr += self.broadband(bb_term, params) else: # Initialize the broadband", "corr[~w] = 0. 
return corr def broadband(self, bb_term, params): \"\"\"Compute broadband term. Calculates", "with configs of broadband terms, by default None metal_corr : bool, optional Whether", "Xi coordinates delta_rp = params.get(self._delta_rp_name, 0.) ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu", "# Check for QSO radiation modeling and check if it is QSOxLYA #", "for j in r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'], i, j)]) bb_params = np.array(bb_params).reshape(-1, r_max", "j)]) bb_params = np.array(bb_params).reshape(-1, r_max - r_min + 1) corr = (bb_params[:, :,", "*= 1 + self.broadband(bb_term, params) else: corr += self.broadband(bb_term, params) else: # Initialize", "broadband \"\"\" r1 = self._r / 100. r2 = self._mu if bb_term['rp_rt'] ==", "= self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute", "self._z_eff = fiducial['z_eff'] self._rel_z_evol = (1. + self._z) / (1 + self._z_eff) self._scale_params", "Croom et al. 2005. 
Parameters ---------- params : dict Computation parameters tracer_name :", "+= self.broadband(bb_term, params) else: # Initialize the broadband and check # if we", "tracer1['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer2['name'] # Precompute growth self._z_fid =", "enumerate(normal_broadbands): # Create the name for the parameters of this term name =", "radiation model xi_rad = strength / (r_shift**2) * (1 - asymmetry * (1", "np.arange(nbins, dtype=float) / (nbins-1) D1 = np.zeros(nbins, dtype=float) pars = (Omega_m, Omega_de) for", "'standard asymmetry' in self._config: self.asymmetry_flag = self._config.getboolean('standard asymmetry') if self.relativistic_flag or self.asymmetry_flag: types", "asked for relativistic effects or standard asymmetry,' ' but they only work for", "= tracer1 self._tracer2 = tracer2 self._z_eff = fiducial['z_eff'] self._rel_z_evol = (1. + self._z)", "return growth**2 # Compute the growth at each redshift on the grid growth", "= self._get_tracer_evol(params, self._tracer1['name']) bias_evol *= self._get_tracer_evol(params, self._tracer2['name']) return bias_evol def _get_tracer_evol(self, params, tracer_name):", "r_max - r_min + 1) corr = (bb_params[:, :, None, None] * r1**r1_powers[:,", "+ mu_shift) / lifetime + 1 / decrease)) return xi_rad def compute_xi_relativistic(self, pk,", "'relativistic correction' in self._config: self.relativistic_flag = self._config.getboolean('relativistic correction') self.asymmetry_flag = False if 'standard", "------- 1d Array Output broadband \"\"\" assert pos_type in ['pre-mul', 'pre-add', 'post-mul', 'post-add']", "quad from scipy.interpolate import interp1d from . 
import utils class CorrelationFunction: \"\"\"Correlation function", "should be kept in init as that is only called once # !", "is None: z_grid = self._z if z_fid is None: z_fid = self._z_fid if", "= np.zeros(nbins, dtype=float) pars = (Omega_m, Omega_de) for i in range(nbins): a =", "- self._mu**2) r_shift = np.sqrt(rp**2 + rt**2) mu_shift = rp / r_shift #", "Computation parameters Returns ------- ND Array Bias evolution for tracer \"\"\" # Compute", "computation and handling. # ! Slow operations should be kept in init as", "cross if self.radiation_flag and not params['peak']: xi += self.compute_qso_radiation(params) # Add relativistic effects", "= 'drp_' + tracer1['name'] elif tracer2['type'] == 'discrete' and tracer1['type'] != 'discrete': self._delta_rp_name", "# Precompute growth self._z_fid = fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m', None) self._Omega_de = fiducial.get('Omega_de',", "= self._config.get(handle_name, 'standard') else: evol_model = self._config.get('z evol', 'standard') # Compute the bias", "terms. 
Parameters ---------- bb_config : list list with configs of broadband terms \"\"\"", "None: z_fid = self._z_fid if Omega_m is None: Omega_m = self._Omega_m if Omega_de", "el in bb_config if el['func'] == 'broadband_sky'] for index, config in enumerate(sky_broadbands): assert", "Xi coordinates ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap,", "if 'croom' in evol_model: bias_evol = self._bias_evol_croom(params, tracer_name) else: bias_evol = self._bias_evol_std(params, tracer_name)", "of z between 0 and zmax, then interpolate nbins = 100 zmax =", "D1 = np.zeros(nbins, dtype=float) pars = (Omega_m, Omega_de) for i in range(nbins): a", "= name bb['func'] = config['func'] bb['rp_rt'] = config['rp_rt'] bb['r_config'] = config['r_config'] bb['mu_config'] =", "1, dr) r2_powers = np.arange(mu_min, mu_max + 1, dmu) bb_params = [] for", "config['rp_rt'] bb['r_config'] = config['r_config'] bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) #", "return corr def broadband_sky(self, bb_term, params): \"\"\"Compute sky broadband term. Calculates a Gaussian", "tracer \"\"\" p0 = params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0 return bias_z def _bias_evol_croom(self, params,", "the grid growth = utils.growth_function(z_grid, Omega_m, Omega_de) # Scale to the fiducial redshift", "1./(a*hubble(z, Omega_m, Omega_de))**3 # Calculate D1 in 100 values of z between 0", "the contribution of QSO radiation to the cross (the transverse proximity effect) Parameters", "al. 2014). Parameters ---------- pk : ND Array Input power spectrum PktoXi_obj :", "term name = 'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands), config['func']) # Create the broadband term", "self.bb_terms['post-add'] = [] self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul'] = [] # First pick up", "is None: if 'mul' in pos_type: corr = 1. 
else: corr = 0.", "Array Bias evolution for tracer \"\"\" handle_name = 'z evol {}'.format(tracer_name) if handle_name", "Initialize the broadband and check # if we need to add or multiply", "! Slow operations should be kept in init as that is only called", "if self.radiation_flag and not params['peak']: xi += self.compute_qso_radiation(params) # Add relativistic effects if", "rescaled_mu def compute_bias_evol(self, params): \"\"\"Compute bias evolution for the correlation function. Parameters ----------", "in types) or (types[0] == types[1]): raise ValueError('You asked for relativistic effects or", "the right model if 'croom' in evol_model: bias_evol = self._bias_evol_croom(params, tracer_name) else: bias_evol", "self.asymmetry_flag = False if 'standard asymmetry' in self._config: self.asymmetry_flag = self._config.getboolean('standard asymmetry') if", "by default None metal_corr : bool, optional Whether this is a metal correlation,", "transform object used to turn Pk into Xi params : dict Computation parameters", "= [] self.bb_terms['post-add'] = [] self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul'] = [] # First", "or 'post-add' Returns ------- 1d Array Output broadband \"\"\" assert pos_type in ['pre-mul',", "normal_broadbands = [el for el in bb_config if el['func'] != 'broadband_sky'] for index,", "mu_shift**2)) xi_rad *= np.exp(-r_shift * ((1 + mu_shift) / lifetime + 1 /", "ND Array Input power spectrum PktoXi_obj : vega.PktoXi An instance of the transform", "\"\"\"Compute tracer bias evolution. Parameters ---------- params : dict Computation parameters tracer_name :", ". import utils class CorrelationFunction: \"\"\"Correlation function computation and handling. # ! Slow", "ap, at, delta_rp) # Compute the correlation function xi_asy = PktoXi_obj.pk_to_xi_asymmetry(rescaled_r, rescaled_mu, pk,", "[el for el in bb_config if el['func'] == 'broadband_sky'] for index, config in", "# ! 
Slow operations should be kept in init as that is only", "<NAME>'s Modern Cosmology book. Returns ------- ND Array Growth factor \"\"\" # Check", "in ['pre-mul', 'pre-add', 'post-mul', 'post-add'] corr = None # Loop over the right", "params): \"\"\"Compute bias evolution for the correlation function. Parameters ---------- params : dict", "contribution from standard asymmetry (Bonvin et al. 2014). Parameters ---------- pk : ND", "the bias evolution bias_evol = self._get_tracer_evol(params, self._tracer1['name']) bias_evol *= self._get_tracer_evol(params, self._tracer2['name']) return bias_evol", "the QSO auto as well? self.radiation_flag = False if 'radiation effects' in self._config:", "utils.growth_function(z_fid, Omega_m, Omega_de) return growth**2 def compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): def hubble(z,", "Output xi asymmetry \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type']", "bias_evol = self._bias_evol_std(params, tracer_name) return bias_evol def _bias_evol_std(self, params, tracer_name): \"\"\"Bias evolution standard", "bb_config : list, optional list with configs of broadband terms, by default None", "Add bias evolution xi *= self.compute_bias_evol(params) # Add growth xi *= self.xi_growth #", "1 + corr elif 'mul' in pos_type: corr *= 1 + self.broadband_sky(bb_term, params)", "Create the name for the parameters of this term name = 'BB-{}-{}-{}'.format(config['cf_name'], index", "+ (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m, Omega_de): z = 1/a-1 return 1./(a*hubble(z, Omega_m, Omega_de))**3", "= self.broadband_sky(bb_term, params) if 'mul' in pos_type: corr = 1 + corr elif", "evol_model = self._config.get('z evol', 'standard') # Compute the bias evolution using the right", "[el for el in bb_config if el['func'] != 'broadband_sky'] for index, config in", "rt = self._r * np.sqrt(1 - self._mu**2) r_shift = np.sqrt(rp**2 + rt**2) 
mu_shift", "self.broadband_sky(bb_term, params) # Give defaults if corr is still None if corr is", "Parameters ---------- pk : ND Array Input power spectrum PktoXi_obj : vega.PktoXi An", "ScaleParameters object tracer1 : dict Config of tracer 1 tracer2 : dict Config", "if ('continuous' not in types) or (types[0] == types[1]): raise ValueError('You asked for", "params): \"\"\"Compute the core of the correlation function. This does the Hankel transform", ": dict Computation parameters tracer_name : string Tracer name Returns ------- ND Array", "= config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) def compute_broadband(self, params, pos_type): \"\"\"Compute the", "1D Array Linear isotropic power spectrum PktoXi_obj : vega.PktoXi An instance of the", "at, delta_rp) # Compute the correlation function xi_asy = PktoXi_obj.pk_to_xi_asymmetry(rescaled_r, rescaled_mu, pk, params)", "at : float Alpha transverse delta_rp : float, optional Delta radius_parallel - nuisance", "mu grids delta_rp = params.get(self._delta_rp_name, 0.) rp = self._r * self._mu + delta_rp", "* (1 - asymmetry * (1 - mu_shift**2)) xi_rad *= np.exp(-r_shift * ((1", "== 'broadband_sky'] for index, config in enumerate(sky_broadbands): assert config['rp_rt'] == 'rp,rt' # Create", "(1 - mu_shift**2)) xi_rad *= np.exp(-r_shift * ((1 + mu_shift) / lifetime +", "'mul' in pos_type: corr *= 1 + self.broadband_sky(bb_term, params) else: corr += self.broadband_sky(bb_term,", "else: self.xi_growth = self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de) # Initialize the broadband self.has_bb =", "\"\"\"Initialize the broadband terms. 
Parameters ---------- bb_config : list list with configs of", "tracer1 : dict Config of tracer 1 tracer2 : dict Config of tracer", "config self._r = coords_grid['r'] self._mu = coords_grid['mu'] self._z = coords_grid['z'] self._multipole = config.getint('single_multipole',", "Returns ------- ND Array Bias evolution for tracer \"\"\" handle_name = 'z evol", "self._delta_rp_name = 'drp_' + tracer2['name'] # Precompute growth self._z_fid = fiducial['z_fiducial'] self._Omega_m =", "bias_evol def _get_tracer_evol(self, params, tracer_name): \"\"\"Compute tracer bias evolution. Parameters ---------- params :", "np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask] = rescaled_rp / rescaled_r[mask] return rescaled_r,", "r_shift = np.sqrt(rp**2 + rt**2) mu_shift = rp / r_shift # Get the", "Initialize the broadband self.has_bb = False if bb_config is not None: self._init_broadband(bb_config) self.has_bb", "Delta radius_parallel - nuisance correction for wrong redshift, used for discrete tracers, by", "# Get rescaled Xi coordinates ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu =", "tracer \"\"\" handle_name = 'z evol {}'.format(tracer_name) if handle_name in self._config: evol_model =", "params): \"\"\"Compute broadband term. Calculates a power-law broadband in r and mu or", "\"\"\"Calculate the cross-correlation contribution from relativistic effects (Bonvin et al. 2014). 
Parameters ----------", "relativistic effects if self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) # Add standard asymmetry", "fiducial.get('Omega_de', None) if not config.getboolean('old_growth_func', False): self.xi_growth = self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de) else:", "can be called from outside \"\"\" def __init__(self, config, fiducial, coords_grid, scale_params, tracer1,", "Parameters ---------- bb_term : dict broadband term config params : dict Computation parameters", "in pos_type: corr *= 1 + self.broadband(bb_term, params) else: corr += self.broadband(bb_term, params)", "np.sqrt(2. * np.pi)) corr *= np.exp(-0.5 * (rt / sigma)**2) w = (rp", "1D Xi QSO radiation model \"\"\" assert 'QSO' in [self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name']", "for one position (pre-distortion/post-distortion) and one type (multiplicative/additive). Parameters ---------- params : dict", ": ND array Array of mu = rp/r coords of Xi ap :", "bb_params = np.array(bb_params).reshape(-1, r_max - r_min + 1) corr = (bb_params[:, :, None,", "+ tracer2['name'] # Precompute growth self._z_fid = fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m', None) self._Omega_de", "\"\"\"Compute growth factor. Implements eq. 7.77 from <NAME>'s Modern Cosmology book. Returns -------", "rescaled_r[mask] = np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask] = rescaled_rp / rescaled_r[mask] return rescaled_r, rescaled_mu", "{} bb['name'] = name bb['func'] = config['func'] bb['rp_rt'] = config['rp_rt'] bb['r_config'] = config['r_config']", "tracer2 : dict Config of tracer 2 bb_config : list, optional list with", "z scale_params : ScaleParameters ScaleParameters object tracer1 : dict Config of tracer 1", "radiation modeling and check if it is QSOxLYA # Does this work for", "is only called once # ! 
Compute is called many times and should", "factor \"\"\" # Check the defaults if z_grid is None: z_grid = self._z", "growth**2 # Compute the growth at each redshift on the grid growth =", "bb_config : list list with configs of broadband terms \"\"\" self.bb_terms = {}", "pk : ND Array Input power spectrum PktoXi_obj : vega.PktoXi An instance of", "bb_term, params): \"\"\"Compute broadband term. Calculates a power-law broadband in r and mu", "string Name of tracer Returns ------- ND Array Bias evolution for tracer \"\"\"", "self._mu**2) r_min, r_max, dr = bb_term['r_config'] mu_min, mu_max, dmu = bb_term['mu_config'] r1_powers =", "params): \"\"\"Calculate the cross-correlation contribution from relativistic effects (Bonvin et al. 2014). Parameters", "def compute_bias_evol(self, params): \"\"\"Compute bias evolution for the correlation function. Parameters ---------- params", "Parameters ---------- config : ConfigParser model section of config file fiducial : dict", "in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] # Get rescaled Xi coordinates delta_rp", "Computation parameters Returns ------- 1D Array Output xi relativistic \"\"\" assert 'continuous' in", "QSO radiation modeling for cross if self.radiation_flag and not params['peak']: xi += self.compute_qso_radiation(params)", "of tracer Returns ------- ND Array Bias evolution for tracer \"\"\" handle_name =", "of this term name = 'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands), config['func']) # Create the", "1. else: corr = 0. return corr def broadband_sky(self, bb_term, params): \"\"\"Compute sky", "Array Growth factor \"\"\" # Check the defaults if z_grid is None: z_grid", "for index, config in enumerate(normal_broadbands): # Create the name for the parameters of", "\"\"\"Compute broadband term. 
Calculates a power-law broadband in r and mu or rp,rt.", "mu[mask] + delta_rp rt = r[mask] * np.sqrt(1 - mu[mask]**2) rescaled_rp = ap", "Array of mu = rp/r coords of Xi ap : float Alpha parallel", "pos_type in ['pre-mul', 'pre-add', 'post-mul', 'post-add'] corr = None # Loop over the", "delta rp (Only for the cross) self._delta_rp_name = None if tracer1['type'] == 'discrete'", "+ corr elif 'mul' in pos_type: corr *= 1 + self.broadband_sky(bb_term, params) else:", "+ 1) corr = (bb_params[:, :, None, None] * r1**r1_powers[:, None, None] *", "def _get_tracer_evol(self, params, tracer_name): \"\"\"Compute tracer bias evolution. Parameters ---------- params : dict", "return 1./(a*hubble(z, Omega_m, Omega_de))**3 # Calculate D1 in 100 values of z between", "if 'standard asymmetry' in self._config: self.asymmetry_flag = self._config.getboolean('standard asymmetry') if self.relativistic_flag or self.asymmetry_flag:", "of this term name = 'BB-{}-{} {} {} {}'.format(config['cf_name'], index, config['type'], config['pre'], config['rp_rt'])", "# Does this work for the QSO auto as well? self.radiation_flag = False", "False \"\"\" self._config = config self._r = coords_grid['r'] self._mu = coords_grid['mu'] self._z =", "self._bias_evol_croom(params, tracer_name) else: bias_evol = self._bias_evol_std(params, tracer_name) return bias_evol def _bias_evol_std(self, params, tracer_name):", "return bias_evol def _bias_evol_std(self, params, tracer_name): \"\"\"Bias evolution standard model. Parameters ---------- params", "list with configs of broadband terms \"\"\" self.bb_terms = {} self.bb_terms['pre-add'] = []", "contribution of QSO radiation to the cross (the transverse proximity effect) Parameters ----------", "wrong redshift, used for discrete tracers, by default 0. Returns ------- ND Array", "in 100 values of z between 0 and zmax, then interpolate nbins =", "# Compute the shifted r and mu grids delta_rp = params.get(self._delta_rp_name, 0.) 
rp", "for QSO radiation effects, but it' ' can only be applied to the", "config['type']].append(bb) def compute_broadband(self, params, pos_type): \"\"\"Compute the broadband terms for one position (pre-distortion/post-distortion)", "self.bb_terms['post-mul'] = [] # First pick up the normal broadband terms normal_broadbands =", "0.) ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at,", "bb_term in self.bb_terms[pos_type]: # Check if it's sky or normal broadband if bb_term['func']", "effects and standard asymmetry self.relativistic_flag = False if 'relativistic correction' in self._config: self.relativistic_flag", "asymmetry = params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime'] decrease = params['qso_rad_decrease'] # Compute the QSO", "= config['func'] bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) def compute_broadband(self, params,", "!= 'broadband_sky': # Initialize the broadband and check # if we need to", "Omega_m = self._Omega_m if Omega_de is None: Omega_de = self._Omega_de # Check if", "modeling for cross if self.radiation_flag and not params['peak']: xi += self.compute_qso_radiation(params) # Add", "Computation parameters Returns ------- 1D Array Output xi asymmetry \"\"\" assert 'continuous' in", "* np.sqrt(1 - mu[mask]**2) rescaled_rp = ap * rp rescaled_rt = at *", "'croom' in evol_model: bias_evol = self._bias_evol_croom(params, tracer_name) else: bias_evol = self._bias_evol_std(params, tracer_name) return", "= params['qso_rad_lifetime'] decrease = params['qso_rad_decrease'] # Compute the QSO radiation model xi_rad =", "PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from standard asymmetry (Bonvin et al. 2014).", "import utils class CorrelationFunction: \"\"\"Correlation function computation and handling. # ! 
Slow operations", "and should be fast Extensions should have their separate method of the form", "to the cross (QSOxLya)') # Check for relativistic effects and standard asymmetry self.relativistic_flag", ": dict Dictionary with coordinate grid - r, mu, z scale_params : ScaleParameters", "return bias_evol def _get_tracer_evol(self, params, tracer_name): \"\"\"Compute tracer bias evolution. Parameters ---------- params", "r1_powers = np.arange(r_min, r_max + 1, dr) r2_powers = np.arange(mu_min, mu_max + 1,", "in self._config: evol_model = self._config.get(handle_name, 'standard') else: evol_model = self._config.get('z evol', 'standard') #", "name = 'BB-{}-{} {} {} {}'.format(config['cf_name'], index, config['type'], config['pre'], config['rp_rt']) # Create the", ": dict fiducial config coords_grid : dict Dictionary with coordinate grid - r,", "= self.broadband(bb_term, params) if 'mul' in pos_type: corr = 1 + corr elif", "= r != 0 rp = r[mask] * mu[mask] + delta_rp rt =", "config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) def compute_broadband(self, params, pos_type): \"\"\"Compute the broadband", "'broadband_sky'] for index, config in enumerate(sky_broadbands): assert config['rp_rt'] == 'rp,rt' # Create the", "xi *= self.xi_growth # Add QSO radiation modeling for cross if self.radiation_flag and", "well? self.radiation_flag = False if 'radiation effects' in self._config: self.radiation_flag = self._config.getboolean('radiation effects')", "broadband terms. Parameters ---------- bb_config : list list with configs of broadband terms", "* np.sqrt(1 - self._mu**2) r_shift = np.sqrt(rp**2 + rt**2) mu_shift = rp /", "1 + corr elif 'mul' in pos_type: corr *= 1 + self.broadband(bb_term, params)", "Tracer name Returns ------- ND Array Bias evolution for tracer \"\"\" assert tracer_name", "self._r / 100. * self._mu r2 = self._r / 100. 
* np.sqrt(1 -", "= (bb_params[:, :, None, None] * r1**r1_powers[:, None, None] * r2**r2_powers[None, :, None]).sum(axis=(0,", "= self._bias_evol_std(params, tracer_name) return bias_evol def _bias_evol_std(self, params, tracer_name): \"\"\"Bias evolution standard model.", "at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) #", "= self._r * np.sqrt(1 - self._mu**2) scale = params[bb_term['name'] + '-scale-sky'] sigma =", "Config of tracer 2 bb_config : list, optional list with configs of broadband", "+ 1, dr) r2_powers = np.arange(mu_min, mu_max + 1, dmu) bb_params = []", ": 1D Array Linear isotropic power spectrum PktoXi_obj : vega.PktoXi An instance of", "params['peak']: xi += self.compute_qso_radiation(params) # Add relativistic effects if self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin,", "broadband term. Calculates a Gaussian broadband in rp,rt for the sky residuals. Parameters", "in bb_config if el['func'] == 'broadband_sky'] for index, config in enumerate(sky_broadbands): assert config['rp_rt']", "'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] # Get rescaled Xi coordinates", "Xi QSO radiation model \"\"\" assert 'QSO' in [self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name'] !=", "evolution for tracer \"\"\" handle_name = 'z evol {}'.format(tracer_name) if handle_name in self._config:", "model \"\"\" assert 'QSO' in [self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name'] != self._tracer2['name'] # Compute", "mu, ap, at, delta_rp=0.): \"\"\"Rescale Xi coordinates using ap/at. 
Parameters ---------- r :", "Array of radius coords of Xi mu : ND array Array of mu", "Array Output broadband \"\"\" rp = self._r * self._mu rt = self._r *", "return rescaled_r, rescaled_mu def compute_bias_evol(self, params): \"\"\"Compute bias evolution for the correlation function.", "correlation function. Parameters ---------- params : dict Computation parameters Returns ------- ND Array", "z_fid) / (1. + z_grid) return growth**2 # Compute the growth at each", "dict Computation parameters Returns ------- 1D Array Output xi asymmetry \"\"\" assert 'continuous'", "and tracer2['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer1['name'] elif tracer2['type'] == 'discrete'", "broadband(self, bb_term, params): \"\"\"Compute broadband term. Calculates a power-law broadband in r and", "in [self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name'] != self._tracer2['name'] # Compute the shifted r and", "called once # ! Compute is called many times and should be fast", "nuisance correction for wrong redshift, used for discrete tracers, by default 0. Returns", "self.broadband_sky(bb_term, params) else: corr += self.broadband_sky(bb_term, params) # Give defaults if corr is", "correction for wrong redshift, used for discrete tracers, by default 0. Returns -------", "bb['name'] = name bb['func'] = config['func'] bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\" +", "'rp,rt': r1 = self._r / 100. * self._mu r2 = self._r / 100.", "up the normal broadband terms normal_broadbands = [el for el in bb_config if", "strength / (r_shift**2) * (1 - asymmetry * (1 - mu_shift**2)) xi_rad *=", "Growth factor \"\"\" # Check the defaults if z_grid is None: z_grid =", "corr = 0. 
return corr def broadband_sky(self, bb_term, params): \"\"\"Compute sky broadband term.", "1 tracer2 : dict Config of tracer 2 bb_config : list, optional list", "asymmetry,' ' but they only work for the cross') def compute(self, pk, pk_lin,", "position (pre-distortion/post-distortion) and one type (multiplicative/additive). Parameters ---------- params : dict Computation parameters", "broadband \"\"\" rp = self._r * self._mu rt = self._r * np.sqrt(1 -", "fiducial['z_eff'] self._rel_z_evol = (1. + self._z) / (1 + self._z_eff) self._scale_params = scale_params", "'discrete' and tracer1['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer2['name'] # Precompute growth", "else: corr += self.broadband_sky(bb_term, params) # Give defaults if corr is still None", "+= self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return xi def compute_core(self, pk, PktoXi_obj, params): \"\"\"Compute the", "metal_corr # Check if we need delta rp (Only for the cross) self._delta_rp_name", "QSO radiation effects, but it' ' can only be applied to the cross", "1 + self.broadband_sky(bb_term, params) else: corr += self.broadband_sky(bb_term, params) # Give defaults if", "object tracer1 : dict Config of tracer 1 tracer2 : dict Config of", "# Add standard asymmetry if self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return xi", "# Check for delta rp delta_rp = 0. if self._delta_rp_name is not None:", "bb = {} bb['name'] = name bb['func'] = config['func'] bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre']", "fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m', None) self._Omega_de = fiducial.get('Omega_de', None) if not config.getboolean('old_growth_func', False):", "in enumerate(normal_broadbands): # Create the name for the parameters of this term name", "!= 'discrete': self._delta_rp_name = 'drp_' + tracer2['name'] # Precompute growth self._z_fid = fiducial['z_fiducial']", "P(k). 
Parameters ---------- pk : ND Array Input power spectrum pk_lin : 1D", "= coords_grid['mu'] self._z = coords_grid['z'] self._multipole = config.getint('single_multipole', -1) self._tracer1 = tracer1 self._tracer2", "'-scale-sky'] sigma = params[bb_term['name'] + '-sigma-sky'] corr = scale / (sigma * np.sqrt(2.", "at, delta_rp=0.): \"\"\"Rescale Xi coordinates using ap/at. Parameters ---------- r : ND array", "self._config: self.asymmetry_flag = self._config.getboolean('standard asymmetry') if self.relativistic_flag or self.asymmetry_flag: types = [self._tracer1['type'], self._tracer2['type']]", "terms, by default None metal_corr : bool, optional Whether this is a metal", "i in r1_powers: for j in r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'], i, j)]) bb_params", "to the fiducial redshift growth /= utils.growth_function(z_fid, Omega_m, Omega_de) return growth**2 def compute_growth_old(self,", "corr += self.broadband_sky(bb_term, params) # Give defaults if corr is still None if", "z_grid = self._z if z_fid is None: z_fid = self._z_fid if Omega_m is", "the broadband self.has_bb = False if bb_config is not None: self._init_broadband(bb_config) self.has_bb =", "evol_model = self._config.get(handle_name, 'standard') else: evol_model = self._config.get('z evol', 'standard') # Compute the", "+ config['type']].append(bb) # Next pick up the sky broadban terms sky_broadbands = [el", "# Create the broadband term dictionary bb = {} bb['name'] = name bb['func']", "self.asymmetry_flag: types = [self._tracer1['type'], self._tracer2['type']] if ('continuous' not in types) or (types[0] ==", "= 0. if self._delta_rp_name is not None: delta_rp = params.get(self._delta_rp_name, 0.) 
# Get", "params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0 return bias_z def _bias_evol_croom(self, params, tracer_name): \"\"\"Bias evolution Croom", "Returns ------- 1d Array Output broadband \"\"\" assert pos_type in ['pre-mul', 'pre-add', 'post-mul',", "# Next pick up the sky broadban terms sky_broadbands = [el for el", "model xi_rad = strength / (r_shift**2) * (1 - asymmetry * (1 -", "False): self.xi_growth = self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de) else: self.xi_growth = self.compute_growth_old(self._z, self._z_fid, self._Omega_m,", "self._z_eff)**2) return bias_z def compute_growth(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute growth factor. Implements", "dictionary bb = {} bb['name'] = name bb['func'] = config['func'] bb['bin_size_rp'] = config['bin_size_rp']", "broadband \"\"\" assert pos_type in ['pre-mul', 'pre-add', 'post-mul', 'post-add'] corr = None #", "self._r = coords_grid['r'] self._mu = coords_grid['mu'] self._z = coords_grid['z'] self._multipole = config.getint('single_multipole', -1)", "Omega_m, Omega_de): z = 1/a-1 return 1./(a*hubble(z, Omega_m, Omega_de))**3 # Calculate D1 in", "rescaled_r[mask] return rescaled_r, rescaled_mu def compute_bias_evol(self, params): \"\"\"Compute bias evolution for the correlation", "for input P(k). Parameters ---------- pk : ND Array Input power spectrum pk_lin", "between 0 and zmax, then interpolate nbins = 100 zmax = 5. 
z", "add or multiply if corr is None: corr = self.broadband_sky(bb_term, params) if 'mul'", "= self._config.getboolean('relativistic correction') self.asymmetry_flag = False if 'standard asymmetry' in self._config: self.asymmetry_flag =", "coords_grid, scale_params, tracer1, tracer2, bb_config=None, metal_corr=False): \"\"\" Parameters ---------- config : ConfigParser model", "bb['name'] = name bb['func'] = config['func'] bb['rp_rt'] = config['rp_rt'] bb['r_config'] = config['r_config'] bb['mu_config']", "# Check the defaults if z_grid is None: z_grid = self._z if z_fid", "self.broadband(bb_term, params) else: corr += self.broadband(bb_term, params) else: # Initialize the broadband and", "bb_term['name'], i, j)]) bb_params = np.array(bb_params).reshape(-1, r_max - r_min + 1) corr =", "bb_term['rp_rt'] == 'rp,rt': r1 = self._r / 100. * self._mu r2 = self._r", "if not config.getboolean('old_growth_func', False): self.xi_growth = self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de) else: self.xi_growth =", ": dict Computation parameters Returns ------- 1D Array Output correlation function \"\"\" #", "to add or multiply if corr is None: corr = self.broadband(bb_term, params) if", "Modern Cosmology book. Returns ------- ND Array Growth factor \"\"\" # Check the", "D1) growth = D1(z_grid) / D1(z_fid) return growth**2 def _init_broadband(self, bb_config): \"\"\"Initialize the", "if 'mul' in pos_type: corr = 1 + corr elif 'mul' in pos_type:", "self._config.getboolean('radiation effects') if self.radiation_flag: names = [self._tracer1['name'], self._tracer2['name']] if not ('QSO' in names", "hubble(z, Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3 + Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m, Omega_de):", "coordinates delta_rp = params.get(self._delta_rp_name, 0.) 
ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu =", "------- 1d Array Output broadband \"\"\" rp = self._r * self._mu rt =", ": dict Computation parameters Returns ------- 1d Array Output broadband \"\"\" r1 =", "pk, PktoXi_obj, params): \"\"\"Compute the core of the correlation function. This does the", "self._tracer2 = tracer2 self._z_eff = fiducial['z_eff'] self._rel_z_evol = (1. + self._z) / (1", "Returns ------- ND Array Rescaled radii ND Array Rescaled mu \"\"\" mask =", "= config['rp_rt'] bb['r_config'] = config['r_config'] bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb)", "as that is only called once # ! Compute is called many times", "of: 'pre-mul' or 'pre-add' or 'post-mul' or 'post-add' Returns ------- 1d Array Output", "into Xi params : dict Computation parameters Returns ------- 1D Array Output xi", "bias_z def _bias_evol_croom(self, params, tracer_name): \"\"\"Bias evolution Croom model for QSO, see Croom", "'post-mul' or 'post-add' Returns ------- 1d Array Output broadband \"\"\" assert pos_type in", "xi = PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole) return xi @staticmethod def _rescale_coords(r, mu, ap,", "------- ND Array Growth factor \"\"\" # Check the defaults if z_grid is", "or 'pre-add' or 'post-mul' or 'post-add' Returns ------- 1d Array Output broadband \"\"\"", "for wrong redshift, used for discrete tracers, by default 0. Returns ------- ND", "= config self._r = coords_grid['r'] self._mu = coords_grid['mu'] self._z = coords_grid['z'] self._multipole =", "see Croom et al. 2005. 
Parameters ---------- params : dict Computation parameters tracer_name", "Compute the correlation function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params) return xi_rel def", "check if it is QSOxLYA # Does this work for the QSO auto", "None, None] * r2**r2_powers[None, :, None]).sum(axis=(0, 1, 2)) return corr def compute_qso_radiation(self, params):", "= self.compute_core(pk, PktoXi_obj, params) # Add bias evolution xi *= self.compute_bias_evol(params) # Add", "and one type (multiplicative/additive). Parameters ---------- params : dict Computation parameters pos_type :", "name Returns ------- ND Array Bias evolution for tracer \"\"\" assert tracer_name ==", "for tracer \"\"\" p0 = params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0 return bias_z def _bias_evol_croom(self,", "tracer_name): \"\"\"Bias evolution Croom model for QSO, see Croom et al. 2005. Parameters", "* np.pi)) corr *= np.exp(-0.5 * (rt / sigma)**2) w = (rp >=", "'standard') # Compute the bias evolution using the right model if 'croom' in", "from scipy.interpolate import interp1d from . import utils class CorrelationFunction: \"\"\"Correlation function computation", "= PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params) return xi_rel def compute_xi_asymmetry(self, pk, PktoXi_obj, params): \"\"\"Calculate", "scipy.interpolate import interp1d from . import utils class CorrelationFunction: \"\"\"Correlation function computation and", "types[1]): raise ValueError('You asked for relativistic effects or standard asymmetry,' ' but they", "normal broadband if bb_term['func'] != 'broadband_sky': # Initialize the broadband and check #", "grids delta_rp = params.get(self._delta_rp_name, 0.) 
rp = self._r * self._mu + delta_rp rt", "self.xi_growth # Add QSO radiation modeling for cross if self.radiation_flag and not params['peak']:", "return xi def compute_core(self, pk, PktoXi_obj, params): \"\"\"Compute the core of the correlation", "= fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m', None) self._Omega_de = fiducial.get('Omega_de', None) if not config.getboolean('old_growth_func',", "correction' in self._config: self.relativistic_flag = self._config.getboolean('relativistic correction') self.asymmetry_flag = False if 'standard asymmetry'", "z = 1/a-1 return 1./(a*hubble(z, Omega_m, Omega_de))**3 # Calculate D1 in 100 values", "bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) # Next pick up the", "self.compute_core(pk, PktoXi_obj, params) # Add bias evolution xi *= self.compute_bias_evol(params) # Add growth", "tracer Returns ------- ND Array Bias evolution for tracer \"\"\" handle_name = 'z", "compute_xi_relativistic(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from relativistic effects (Bonvin et", "compute_xi_asymmetry(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from standard asymmetry (Bonvin et", "Check for relativistic effects and standard asymmetry self.relativistic_flag = False if 'relativistic correction'", "corr = 1. else: corr = 0. return corr def broadband_sky(self, bb_term, params):", "Parameters ---------- params : dict Computation parameters Returns ------- ND Array Bias evolution", "7.77 from <NAME>'s Modern Cosmology book. 
Returns ------- ND Array Growth factor \"\"\"", "at, delta_rp) # Compute correlation function xi = PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole) return", "not config.getboolean('old_growth_func', False): self.xi_growth = self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de) else: self.xi_growth = self.compute_growth_old(self._z,", "is None: growth = (1 + z_fid) / (1. + z_grid) return growth**2", "used to turn Pk into Xi params : dict Computation parameters Returns -------", "evolution for tracer \"\"\" assert tracer_name == \"QSO\" p0 = params[\"croom_par0\"] p1 =", "terms normal_broadbands = [el for el in bb_config if el['func'] != 'broadband_sky'] for", "corr is None: if 'mul' in pos_type: corr = 1. else: corr =", "- r, mu, z scale_params : ScaleParameters ScaleParameters object tracer1 : dict Config", "+ self._z)**2) / (p0 + p1 * (1 + self._z_eff)**2) return bias_z def", "p1 * (1 + self._z_eff)**2) return bias_z def compute_growth(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None):", "function. Parameters ---------- params : dict Computation parameters Returns ------- ND Array Bias", "= (rp >= 0.) & (rp < bb_term['bin_size_rp']) corr[~w] = 0. return corr", "growth**2 def _init_broadband(self, bb_config): \"\"\"Initialize the broadband terms. Parameters ---------- bb_config : list", "in r1_powers: for j in r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'], i, j)]) bb_params =", "= [] for i in r1_powers: for j in r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'],", "handle_name = 'z evol {}'.format(tracer_name) if handle_name in self._config: evol_model = self._config.get(handle_name, 'standard')", "al. 2005. Parameters ---------- params : dict Computation parameters tracer_name : string Tracer", "fast Extensions should have their separate method of the form 'compute_extension' that can", "delta_rp=0.): \"\"\"Rescale Xi coordinates using ap/at. 
Parameters ---------- r : ND array Array", "have dark energy if Omega_de is None: growth = (1 + z_fid) /", "be applied to the cross (QSOxLya)') # Check for relativistic effects and standard", "_get_tracer_evol(self, params, tracer_name): \"\"\"Compute tracer bias evolution. Parameters ---------- params : dict Computation", "Compute the growth at each redshift on the grid growth = utils.growth_function(z_grid, Omega_m,", "parameters pos_type : string String with position and type, must be one of:", "if self.relativistic_flag or self.asymmetry_flag: types = [self._tracer1['type'], self._tracer2['type']] if ('continuous' not in types)", "'-sigma-sky'] corr = scale / (sigma * np.sqrt(2. * np.pi)) corr *= np.exp(-0.5", ":, None]).sum(axis=(0, 1, 2)) return corr def compute_qso_radiation(self, params): \"\"\"Model the contribution of", "\"\"\"Compute the core of the correlation function. This does the Hankel transform of", "self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute correlation", "def compute_xi_asymmetry(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from standard asymmetry (Bonvin", "corr = 1 + corr elif 'mul' in pos_type: corr *= 1 +", "self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul'] = [] # First pick up the normal broadband", "# Check for relativistic effects and standard asymmetry self.relativistic_flag = False if 'relativistic", "transverse delta_rp : float, optional Delta radius_parallel - nuisance correction for wrong redshift,", "params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime'] decrease = params['qso_rad_decrease'] # Compute the", "'compute_extension' that can be called from outside \"\"\" def __init__(self, config, fiducial, coords_grid,", "effects or standard asymmetry,' ' but they only work for the cross') def", "delta_rp) # Compute the 
correlation function xi_asy = PktoXi_obj.pk_to_xi_asymmetry(rescaled_r, rescaled_mu, pk, params) return", "def _init_broadband(self, bb_config): \"\"\"Initialize the broadband terms. Parameters ---------- bb_config : list list", "xi relativistic \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] #", "in self._config: self.radiation_flag = self._config.getboolean('radiation effects') if self.radiation_flag: names = [self._tracer1['name'], self._tracer2['name']] if", "Omega_de) for i in range(nbins): a = 1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0,", ": dict Computation parameters Returns ------- 1D Xi QSO radiation model \"\"\" assert", "separate method of the form 'compute_extension' that can be called from outside \"\"\"", "return xi_rel def compute_xi_asymmetry(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from standard", "1D Array Output correlation function \"\"\" # Check for delta rp delta_rp =", "growth /= utils.growth_function(z_fid, Omega_m, Omega_de) return growth**2 def compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None):", "== 'rp,rt' # Create the name for the parameters of this term name", "p0 = params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0 return bias_z def _bias_evol_croom(self, params, tracer_name): \"\"\"Bias", "tracer_name : string Tracer name Returns ------- ND Array Bias evolution for tracer", "'broadband_sky': # Initialize the broadband and check # if we need to add", "PktoXi_obj, params) return xi def compute_core(self, pk, PktoXi_obj, params): \"\"\"Compute the core of", "= [] self.bb_terms['post-mul'] = [] # First pick up the normal broadband terms", "for relativistic effects or standard asymmetry,' ' but they only work for the", "0. return corr def broadband(self, bb_term, params): \"\"\"Compute broadband term. 
Calculates a power-law", "= self._z_fid if Omega_m is None: Omega_m = self._Omega_m if Omega_de is None:", ":, None, None] * r1**r1_powers[:, None, None] * r2**r2_powers[None, :, None]).sum(axis=(0, 1, 2))", "None: z_grid = self._z if z_fid is None: z_fid = self._z_fid if Omega_m", "growth = (1 + z_fid) / (1. + z_grid) return growth**2 # Compute", "True # Check for QSO radiation modeling and check if it is QSOxLYA", "Omega_m, Omega_de))**3 # Calculate D1 in 100 values of z between 0 and", "this term name = 'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands), config['func']) # Create the broadband", "of Xi mu : ND array Array of mu = rp/r coords of", "! Compute is called many times and should be fast Extensions should have", "tracer_name): \"\"\"Compute tracer bias evolution. Parameters ---------- params : dict Computation parameters tracer_name", "= (1. + self._z) / (1 + self._z_eff) self._scale_params = scale_params self._metal_corr =", "pos_type): \"\"\"Compute the broadband terms for one position (pre-distortion/post-distortion) and one type (multiplicative/additive).", "self._config.getboolean('standard asymmetry') if self.relativistic_flag or self.asymmetry_flag: types = [self._tracer1['type'], self._tracer2['type']] if ('continuous' not", "params) # Add bias evolution xi *= self.compute_bias_evol(params) # Add growth xi *=", "Omega_de))**3 # Calculate D1 in 100 values of z between 0 and zmax,", "be one of: 'pre-mul' or 'pre-add' or 'post-mul' or 'post-add' Returns ------- 1d", "+ self._z) / (1 + self._z_eff) self._scale_params = scale_params self._metal_corr = metal_corr #", "ConfigParser model section of config file fiducial : dict fiducial config coords_grid :", "proximity effect) Parameters ---------- params : dict Computation parameters Returns ------- 1D Xi", "type (multiplicative/additive). 
Parameters ---------- params : dict Computation parameters pos_type : string String", "correlation function \"\"\" # Compute the core xi = self.compute_core(pk, PktoXi_obj, params) #", "for the cross) self._delta_rp_name = None if tracer1['type'] == 'discrete' and tracer2['type'] !=", "+ self.broadband_sky(bb_term, params) else: corr += self.broadband_sky(bb_term, params) # Give defaults if corr", "correlation function xi = PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole) return xi @staticmethod def _rescale_coords(r,", "corr *= np.exp(-0.5 * (rt / sigma)**2) w = (rp >= 0.) &", "self.has_bb = False if bb_config is not None: self._init_broadband(bb_config) self.has_bb = True #", "= self._bias_evol_croom(params, tracer_name) else: bias_evol = self._bias_evol_std(params, tracer_name) return bias_evol def _bias_evol_std(self, params,", "by default 0. Returns ------- ND Array Rescaled radii ND Array Rescaled mu", "broadband terms \"\"\" self.bb_terms = {} self.bb_terms['pre-add'] = [] self.bb_terms['post-add'] = [] self.bb_terms['pre-mul']", "cross-correlation contribution from standard asymmetry (Bonvin et al. 2014). Parameters ---------- pk :", ": float, optional Delta radius_parallel - nuisance correction for wrong redshift, used for", "/ D1(z_fid) return growth**2 def _init_broadband(self, bb_config): \"\"\"Initialize the broadband terms. Parameters ----------", "np from scipy.integrate import quad from scipy.interpolate import interp1d from . import utils", "Xi params : dict Computation parameters Returns ------- 1D Array Output correlation function", "/ (p0 + p1 * (1 + self._z_eff)**2) return bias_z def compute_growth(self, z_grid=None,", "bias evolution for the correlation function. 
Parameters ---------- params : dict Computation parameters", "= self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute correlation function xi = PktoXi_obj.compute(rescaled_r,", "self._get_tracer_evol(params, self._tracer1['name']) bias_evol *= self._get_tracer_evol(params, self._tracer2['name']) return bias_evol def _get_tracer_evol(self, params, tracer_name): \"\"\"Compute", "*= self._get_tracer_evol(params, self._tracer2['name']) return bias_evol def _get_tracer_evol(self, params, tracer_name): \"\"\"Compute tracer bias evolution.", "for the parameters of this term name = 'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands), config['func'])", "if bb_config is not None: self._init_broadband(bb_config) self.has_bb = True # Check for QSO", "* np.sqrt(1 - self._mu**2) r_min, r_max, dr = bb_term['r_config'] mu_min, mu_max, dmu =", "{} {}'.format(config['cf_name'], index, config['type'], config['pre'], config['rp_rt']) # Create the broadband term dictionary bb", "{}'.format(config['cf_name'], index, config['type'], config['pre'], config['rp_rt']) # Create the broadband term dictionary bb =", "grid growth = utils.growth_function(z_grid, Omega_m, Omega_de) # Scale to the fiducial redshift growth", "or normal broadband if bb_term['func'] != 'broadband_sky': # Initialize the broadband and check", "= self._r / 100. * np.sqrt(1 - self._mu**2) r_min, r_max, dr = bb_term['r_config']", "self._mu + delta_rp rt = self._r * np.sqrt(1 - self._mu**2) r_shift = np.sqrt(rp**2", "config coords_grid : dict Dictionary with coordinate grid - r, mu, z scale_params", "' can only be applied to the cross (QSOxLya)') # Check for relativistic", "bias evolution. 
Parameters ---------- params : dict Computation parameters tracer_name : string Name", "optional list with configs of broadband terms, by default None metal_corr : bool,", "relativistic effects or standard asymmetry,' ' but they only work for the cross')", "\"\"\" self.bb_terms = {} self.bb_terms['pre-add'] = [] self.bb_terms['post-add'] = [] self.bb_terms['pre-mul'] = []", "of radius coords of Xi mu : ND array Array of mu =", "not None: self._init_broadband(bb_config) self.has_bb = True # Check for QSO radiation modeling and", "broadband term. Calculates a power-law broadband in r and mu or rp,rt. Parameters", "'BB-{}-{} {} {} {}'.format(config['cf_name'], index, config['type'], config['pre'], config['rp_rt']) # Create the broadband term", "if Omega_m is None: Omega_m = self._Omega_m if Omega_de is None: Omega_de =", "index + len(normal_broadbands), config['func']) # Create the broadband term dictionary bb = {}", "the parameters of this term name = 'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands), config['func']) #", "self._Omega_de # Check if we have dark energy if Omega_de is None: growth", "parameters Returns ------- 1D Array Output xi relativistic \"\"\" assert 'continuous' in [self._tracer1['type'],", "eq. 7.77 from <NAME>'s Modern Cosmology book. Returns ------- ND Array Growth factor", "raise ValueError('You asked for QSO radiation effects, but it' ' can only be", "return bias_z def compute_growth(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute growth factor. Implements eq.", "None: delta_rp = params.get(self._delta_rp_name, 0.) 
# Get rescaled Xi coordinates ap, at =", "False if 'standard asymmetry' in self._config: self.asymmetry_flag = self._config.getboolean('standard asymmetry') if self.relativistic_flag or", "called from outside \"\"\" def __init__(self, config, fiducial, coords_grid, scale_params, tracer1, tracer2, bb_config=None,", "0, a, args=pars)[0] D1 = interp1d(z, D1) growth = D1(z_grid) / D1(z_fid) return", "'LYA' in names): raise ValueError('You asked for QSO radiation effects, but it' '", "over the right pos/type configuration for bb_term in self.bb_terms[pos_type]: # Check if it's", "coordinates using ap/at. Parameters ---------- r : ND array Array of radius coords", "function \"\"\" # Compute the core xi = self.compute_core(pk, PktoXi_obj, params) # Add", "\"-\" + config['type']].append(bb) def compute_broadband(self, params, pos_type): \"\"\"Compute the broadband terms for one", "bb_params = [] for i in r1_powers: for j in r2_powers: bb_params.append(params['{} ({},{})'.format(", "'rp,rt' # Create the name for the parameters of this term name =", "r[mask] * np.sqrt(1 - mu[mask]**2) rescaled_rp = ap * rp rescaled_rt = at", "\"\"\" p0 = params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0 return bias_z def _bias_evol_croom(self, params, tracer_name):", "= np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask] = rescaled_rp / rescaled_r[mask] return rescaled_r, rescaled_mu def", "\"\"\"Calculate the cross-correlation contribution from standard asymmetry (Bonvin et al. 2014). Parameters ----------", "+ '-sigma-sky'] corr = scale / (sigma * np.sqrt(2. * np.pi)) corr *=", "params : dict Computation parameters tracer_name : string Tracer name Returns ------- ND", "self._z_fid, self._Omega_m, self._Omega_de) else: self.xi_growth = self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de) # Initialize the", "ND array Array of mu = rp/r coords of Xi ap : float", "as well? 
self.radiation_flag = False if 'radiation effects' in self._config: self.radiation_flag = self._config.getboolean('radiation", "self._z if z_fid is None: z_fid = self._z_fid if Omega_m is None: Omega_m", "(1 + z_fid) / (1. + z_grid) return growth**2 # Compute the growth", "radiation model \"\"\" assert 'QSO' in [self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name'] != self._tracer2['name'] #", "self.bb_terms = {} self.bb_terms['pre-add'] = [] self.bb_terms['post-add'] = [] self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul']", "array Array of mu = rp/r coords of Xi ap : float Alpha", "- r_min + 1) corr = (bb_params[:, :, None, None] * r1**r1_powers[:, None,", "('QSO' in names and 'LYA' in names): raise ValueError('You asked for QSO radiation", "self._delta_rp_name = 'drp_' + tracer1['name'] elif tracer2['type'] == 'discrete' and tracer1['type'] != 'discrete':", "# Check if we need delta rp (Only for the cross) self._delta_rp_name =", "ND array Array of radius coords of Xi mu : ND array Array", "pos_type: corr *= 1 + self.broadband(bb_term, params) else: corr += self.broadband(bb_term, params) else:", "Omega_de): z = 1/a-1 return 1./(a*hubble(z, Omega_m, Omega_de))**3 # Calculate D1 in 100", "is a metal correlation, by default False \"\"\" self._config = config self._r =", "if we need to add or multiply if corr is None: corr =", "that is only called once # ! 
Compute is called many times and", "of tracer 2 bb_config : list, optional list with configs of broadband terms,", "or multiply if corr is None: corr = self.broadband(bb_term, params) if 'mul' in", "tracer1['type'] == 'discrete' and tracer2['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer1['name'] elif", "bias_evol = self._bias_evol_croom(params, tracer_name) else: bias_evol = self._bias_evol_std(params, tracer_name) return bias_evol def _bias_evol_std(self,", "if self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return xi def compute_core(self, pk, PktoXi_obj,", "el in bb_config if el['func'] != 'broadband_sky'] for index, config in enumerate(normal_broadbands): #", "Output broadband \"\"\" rp = self._r * self._mu rt = self._r * np.sqrt(1", "lifetime = params['qso_rad_lifetime'] decrease = params['qso_rad_decrease'] # Compute the QSO radiation model xi_rad", "QSOxLYA # Does this work for the QSO auto as well? self.radiation_flag =", "Tracer name Returns ------- ND Array Bias evolution for tracer \"\"\" p0 =", "xi_rad def compute_xi_relativistic(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from relativistic effects", "self._Omega_de = fiducial.get('Omega_de', None) if not config.getboolean('old_growth_func', False): self.xi_growth = self.compute_growth(self._z, self._z_fid, self._Omega_m,", "delta_rp) # Compute correlation function xi = PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole) return xi", "Xi params : dict Computation parameters Returns ------- 1D Array Output xi relativistic", "and standard asymmetry self.relativistic_flag = False if 'relativistic correction' in self._config: self.relativistic_flag =", "= np.arange(r_min, r_max + 1, dr) r2_powers = np.arange(mu_min, mu_max + 1, dmu)", "function xi = PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole) return xi @staticmethod def _rescale_coords(r, mu,", "PktoXi_obj, params): \"\"\"Compute the core of 
the correlation function. This does the Hankel", "bias_evol = self._get_tracer_evol(params, self._tracer1['name']) bias_evol *= self._get_tracer_evol(params, self._tracer2['name']) return bias_evol def _get_tracer_evol(self, params,", "z_fid is None: z_fid = self._z_fid if Omega_m is None: Omega_m = self._Omega_m", "auto as well? self.radiation_flag = False if 'radiation effects' in self._config: self.radiation_flag =", "tracer2 self._z_eff = fiducial['z_eff'] self._rel_z_evol = (1. + self._z) / (1 + self._z_eff)", "# Compute the QSO radiation model xi_rad = strength / (r_shift**2) * (1", "and check if it is QSOxLYA # Does this work for the QSO", "or (types[0] == types[1]): raise ValueError('You asked for relativistic effects or standard asymmetry,'", "self.bb_terms['pre-add'] = [] self.bb_terms['post-add'] = [] self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul'] = [] #", "self._r * np.sqrt(1 - self._mu**2) scale = params[bb_term['name'] + '-scale-sky'] sigma = params[bb_term['name']", "at, delta_rp) # Compute the correlation function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params)", "return xi_rad def compute_xi_relativistic(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from relativistic", "the normal broadband terms normal_broadbands = [el for el in bb_config if el['func']", "= {} bb['name'] = name bb['func'] = config['func'] bb['rp_rt'] = config['rp_rt'] bb['r_config'] =", "* (1 + self._z_eff)**2) return bias_z def compute_growth(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute", "This does the Hankel transform of the input P(k), sums the necessary multipoles", "isotropic power spectrum PktoXi_obj : vega.PktoXi An instance of the transform object used", ": list list with configs of broadband terms \"\"\" self.bb_terms = {} self.bb_terms['pre-add']", "= [el for el in bb_config if el['func'] == 'broadband_sky'] for index, config", "optional Delta radius_parallel - 
nuisance correction for wrong redshift, used for discrete tracers,", "parameters of this term name = 'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands), config['func']) # Create", "= 1 + corr elif 'mul' in pos_type: corr *= 1 + self.broadband(bb_term,", "= strength / (r_shift**2) * (1 - asymmetry * (1 - mu_shift**2)) xi_rad", "sky broadband term. Calculates a Gaussian broadband in rp,rt for the sky residuals.", "self.relativistic_flag or self.asymmetry_flag: types = [self._tracer1['type'], self._tracer2['type']] if ('continuous' not in types) or", "Array Bias evolution for tracer \"\"\" p0 = params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0 return", "tracer \"\"\" # Compute the bias evolution bias_evol = self._get_tracer_evol(params, self._tracer1['name']) bias_evol *=", "only be applied to the cross (QSOxLya)') # Check for relativistic effects and", "a = 1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a, args=pars)[0] D1 = interp1d(z,", "return xi @staticmethod def _rescale_coords(r, mu, ap, at, delta_rp=0.): \"\"\"Rescale Xi coordinates using", "rescaled_mu[mask] = rescaled_rp / rescaled_r[mask] return rescaled_r, rescaled_mu def compute_bias_evol(self, params): \"\"\"Compute bias", "Returns ------- 1D Array Output correlation function \"\"\" # Check for delta rp", "ValueError('You asked for relativistic effects or standard asymmetry,' ' but they only work", "rp delta_rp = 0. 
if self._delta_rp_name is not None: delta_rp = params.get(self._delta_rp_name, 0.)", "broadband term config params : dict Computation parameters Returns ------- 1d Array Output", "return np.sqrt(Omega_m*(1+z)**3 + Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m, Omega_de): z = 1/a-1", "at each redshift on the grid growth = utils.growth_function(z_grid, Omega_m, Omega_de) # Scale", "(nbins-1) D1 = np.zeros(nbins, dtype=float) pars = (Omega_m, Omega_de) for i in range(nbins):", "self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de) # Initialize the broadband self.has_bb = False if bb_config", "asked for QSO radiation effects, but it' ' can only be applied to", "it's sky or normal broadband if bb_term['func'] != 'broadband_sky': # Initialize the broadband", "rt = r[mask] * np.sqrt(1 - mu[mask]**2) rescaled_rp = ap * rp rescaled_rt", "if handle_name in self._config: evol_model = self._config.get(handle_name, 'standard') else: evol_model = self._config.get('z evol',", "r2_powers = np.arange(mu_min, mu_max + 1, dmu) bb_params = [] for i in", "None] * r2**r2_powers[None, :, None]).sum(axis=(0, 1, 2)) return corr def compute_qso_radiation(self, params): \"\"\"Model", "asymmetry self.relativistic_flag = False if 'relativistic correction' in self._config: self.relativistic_flag = self._config.getboolean('relativistic correction')", ": float Alpha parallel at : float Alpha transverse delta_rp : float, optional", "bb['r_config'] = config['r_config'] bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) # Next", "None: growth = (1 + z_fid) / (1. 
+ z_grid) return growth**2 #", "= [el for el in bb_config if el['func'] != 'broadband_sky'] for index, config", "self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute the", "100 values of z between 0 and zmax, then interpolate nbins = 100", "np.arange(mu_min, mu_max + 1, dmu) bb_params = [] for i in r1_powers: for", "self.radiation_flag: names = [self._tracer1['name'], self._tracer2['name']] if not ('QSO' in names and 'LYA' in", "D1 = interp1d(z, D1) growth = D1(z_grid) / D1(z_fid) return growth**2 def _init_broadband(self,", "is None: Omega_m = self._Omega_m if Omega_de is None: Omega_de = self._Omega_de #", "(rt / sigma)**2) w = (rp >= 0.) & (rp < bb_term['bin_size_rp']) corr[~w]", "xi @staticmethod def _rescale_coords(r, mu, ap, at, delta_rp=0.): \"\"\"Rescale Xi coordinates using ap/at.", "Computation parameters tracer_name : string Tracer name Returns ------- ND Array Bias evolution", "QSO radiation model parameters strength = params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime']", "= np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask] = rescaled_rp / rescaled_r[mask] return", "radii ND Array Rescaled mu \"\"\" mask = r != 0 rp =", "* rp rescaled_rt = at * rt rescaled_r = np.zeros(len(r)) rescaled_mu = np.zeros(len(mu))", "= interp1d(z, D1) growth = D1(z_grid) / D1(z_fid) return growth**2 def _init_broadband(self, bb_config):", "energy if Omega_de is None: growth = (1 + z_fid) / (1. +", "scale = params[bb_term['name'] + '-scale-sky'] sigma = params[bb_term['name'] + '-sigma-sky'] corr = scale", "np.exp(-0.5 * (rt / sigma)**2) w = (rp >= 0.) 
& (rp <", "coords_grid : dict Dictionary with coordinate grid - r, mu, z scale_params :", "= metal_corr # Check if we need delta rp (Only for the cross)", "normal broadband terms normal_broadbands = [el for el in bb_config if el['func'] !=", "self._tracer2['name'] # Compute the shifted r and mu grids delta_rp = params.get(self._delta_rp_name, 0.)", "import quad from scipy.interpolate import interp1d from . import utils class CorrelationFunction: \"\"\"Correlation", "self._z = coords_grid['z'] self._multipole = config.getint('single_multipole', -1) self._tracer1 = tracer1 self._tracer2 = tracer2", "------- 1D Xi QSO radiation model \"\"\" assert 'QSO' in [self._tracer1['name'], self._tracer2['name']] assert", "if bb_term['func'] != 'broadband_sky': # Initialize the broadband and check # if we", "('continuous' not in types) or (types[0] == types[1]): raise ValueError('You asked for relativistic", "/ 100. * np.sqrt(1 - self._mu**2) r_min, r_max, dr = bb_term['r_config'] mu_min, mu_max,", "growth factor. Implements eq. 7.77 from <NAME>'s Modern Cosmology book. Returns ------- ND", "*= np.exp(-r_shift * ((1 + mu_shift) / lifetime + 1 / decrease)) return", "+ 1 / decrease)) return xi_rad def compute_xi_relativistic(self, pk, PktoXi_obj, params): \"\"\"Calculate the", "Check the defaults if z_grid is None: z_grid = self._z if z_fid is", "xi asymmetry \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] #", "rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute the correlation function", "PktoXi_obj, params) # Add bias evolution xi *= self.compute_bias_evol(params) # Add growth xi", "xi_rad *= np.exp(-r_shift * ((1 + mu_shift) / lifetime + 1 / decrease))", "term dictionary bb = {} bb['name'] = name bb['func'] = config['func'] bb['bin_size_rp'] =", "r2 = self._r / 100. 
* np.sqrt(1 - self._mu**2) r_min, r_max, dr =", "rescaled_mu, pk, self._multipole) return xi @staticmethod def _rescale_coords(r, mu, ap, at, delta_rp=0.): \"\"\"Rescale", "if 'relativistic correction' in self._config: self.relativistic_flag = self._config.getboolean('relativistic correction') self.asymmetry_flag = False if", "for the parameters of this term name = 'BB-{}-{} {} {} {}'.format(config['cf_name'], index,", "config['pre'], config['rp_rt']) # Create the broadband term dictionary bb = {} bb['name'] =", "dict Computation parameters pos_type : string String with position and type, must be", "params['qso_rad_lifetime'] decrease = params['qso_rad_decrease'] # Compute the QSO radiation model xi_rad = strength", "the correlation function. This does the Hankel transform of the input P(k), sums", "if corr is None: corr = self.broadband_sky(bb_term, params) if 'mul' in pos_type: corr", "the core xi = self.compute_core(pk, PktoXi_obj, params) # Add bias evolution xi *=", "<gh_stars>0 import numpy as np from scipy.integrate import quad from scipy.interpolate import interp1d", "self.compute_qso_radiation(params) # Add relativistic effects if self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) #", "interp1d from . import utils class CorrelationFunction: \"\"\"Correlation function computation and handling. #", "QSO auto as well? self.radiation_flag = False if 'radiation effects' in self._config: self.radiation_flag", "\"\"\" rp = self._r * self._mu rt = self._r * np.sqrt(1 - self._mu**2)", "be kept in init as that is only called once # ! Compute", "for index, config in enumerate(sky_broadbands): assert config['rp_rt'] == 'rp,rt' # Create the name", "None] * r1**r1_powers[:, None, None] * r2**r2_powers[None, :, None]).sum(axis=(0, 1, 2)) return corr", "Check for QSO radiation modeling and check if it is QSOxLYA # Does", "term. Calculates a power-law broadband in r and mu or rp,rt. Parameters ----------", "= self._r / 100. 
r2 = self._mu if bb_term['rp_rt'] == 'rp,rt': r1 =", "compute_bias_evol(self, params): \"\"\"Compute bias evolution for the correlation function. Parameters ---------- params :", "or rp,rt. Parameters ---------- bb_term : dict broadband term config params : dict", "QSO radiation model xi_rad = strength / (r_shift**2) * (1 - asymmetry *", "Rescaled mu \"\"\" mask = r != 0 rp = r[mask] * mu[mask]", "r1 = self._r / 100. * self._mu r2 = self._r / 100. *", "correlation function for input P(k). Parameters ---------- pk : ND Array Input power", "\"\"\"Bias evolution Croom model for QSO, see Croom et al. 2005. Parameters ----------", "of the transform object used to turn Pk into Xi params : dict", "el['func'] == 'broadband_sky'] for index, config in enumerate(sky_broadbands): assert config['rp_rt'] == 'rp,rt' #", "right pos/type configuration for bb_term in self.bb_terms[pos_type]: # Check if it's sky or", "and check # if we need to add or multiply if corr is", "self._z_fid = fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m', None) self._Omega_de = fiducial.get('Omega_de', None) if not", "Parameters ---------- params : dict Computation parameters tracer_name : string Name of tracer", "pos/type configuration for bb_term in self.bb_terms[pos_type]: # Check if it's sky or normal", "config.getboolean('old_growth_func', False): self.xi_growth = self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de) else: self.xi_growth = self.compute_growth_old(self._z, self._z_fid,", "self._mu**2) scale = params[bb_term['name'] + '-scale-sky'] sigma = params[bb_term['name'] + '-sigma-sky'] corr =", "params): \"\"\"Model the contribution of QSO radiation to the cross (the transverse proximity", "is None: Omega_de = self._Omega_de # Check if we have dark energy if", "rescaled_r, rescaled_mu def compute_bias_evol(self, params): \"\"\"Compute bias evolution for the correlation function. 
Parameters", "bb_config is not None: self._init_broadband(bb_config) self.has_bb = True # Check for QSO radiation", "radiation effects, but it' ' can only be applied to the cross (QSOxLya)')", "---------- r : ND array Array of radius coords of Xi mu :", "xi_rad = strength / (r_shift**2) * (1 - asymmetry * (1 - mu_shift**2))", "Precompute growth self._z_fid = fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m', None) self._Omega_de = fiducial.get('Omega_de', None)", "rescaled_r = np.zeros(len(r)) rescaled_mu = np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask] =", "bb = {} bb['name'] = name bb['func'] = config['func'] bb['rp_rt'] = config['rp_rt'] bb['r_config']", "rescaled_rt = at * rt rescaled_r = np.zeros(len(r)) rescaled_mu = np.zeros(len(mu)) rescaled_r[mask] =", "in pos_type: corr = 1. else: corr = 0. return corr def broadband_sky(self,", "rescaled_rp = ap * rp rescaled_rt = at * rt rescaled_r = np.zeros(len(r))", "this is a metal correlation, by default False \"\"\" self._config = config self._r", "term. Calculates a Gaussian broadband in rp,rt for the sky residuals. 
Parameters ----------", "term dictionary bb = {} bb['name'] = name bb['func'] = config['func'] bb['rp_rt'] =", "for bb_term in self.bb_terms[pos_type]: # Check if it's sky or normal broadband if", "applied to the cross (QSOxLya)') # Check for relativistic effects and standard asymmetry", "or 'post-mul' or 'post-add' Returns ------- 1d Array Output broadband \"\"\" assert pos_type", "[] # First pick up the normal broadband terms normal_broadbands = [el for", "def __init__(self, config, fiducial, coords_grid, scale_params, tracer1, tracer2, bb_config=None, metal_corr=False): \"\"\" Parameters ----------", "*= 1 + self.broadband_sky(bb_term, params) else: corr += self.broadband_sky(bb_term, params) # Give defaults", "pick up the sky broadban terms sky_broadbands = [el for el in bb_config", "self._tracer1['name']) bias_evol *= self._get_tracer_evol(params, self._tracer2['name']) return bias_evol def _get_tracer_evol(self, params, tracer_name): \"\"\"Compute tracer", "config['type'], config['pre'], config['rp_rt']) # Create the broadband term dictionary bb = {} bb['name']", "# if we need to add or multiply if corr is None: corr", "= params[\"croom_par0\"] p1 = params[\"croom_par1\"] bias_z = (p0 + p1*(1. + self._z)**2) /", "Array Output correlation function \"\"\" # Check for delta rp delta_rp = 0.", "then interpolate nbins = 100 zmax = 5. z = zmax * np.arange(nbins,", "@staticmethod def _rescale_coords(r, mu, ap, at, delta_rp=0.): \"\"\"Rescale Xi coordinates using ap/at. 
Parameters", "dict Config of tracer 1 tracer2 : dict Config of tracer 2 bb_config", "check # if we need to add or multiply if corr is None:", "= config['mu_config'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) # Next pick up the sky", "Get the QSO radiation model parameters strength = params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry'] lifetime", "config.getint('single_multipole', -1) self._tracer1 = tracer1 self._tracer2 = tracer2 self._z_eff = fiducial['z_eff'] self._rel_z_evol =", "Pk into Xi params : dict Computation parameters Returns ------- 1D Array Output", "evolution Croom model for QSO, see Croom et al. 2005. Parameters ---------- params", "def compute_xi_relativistic(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from relativistic effects (Bonvin", "rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute correlation function xi", "self._Omega_m, self._Omega_de) else: self.xi_growth = self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de) # Initialize the broadband", "rescales the coordinates Parameters ---------- pk : ND Array Input power spectrum PktoXi_obj", "times and should be fast Extensions should have their separate method of the", "for QSO radiation modeling and check if it is QSOxLYA # Does this", "evolution xi *= self.compute_bias_evol(params) # Add growth xi *= self.xi_growth # Add QSO", "bb['rp_rt'] = config['rp_rt'] bb['r_config'] = config['r_config'] bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre'] + \"-\" +", "100 zmax = 5. z = zmax * np.arange(nbins, dtype=float) / (nbins-1) D1", "they only work for the cross') def compute(self, pk, pk_lin, PktoXi_obj, params): \"\"\"Compute", "bb_term['bin_size_rp']) corr[~w] = 0. 
return corr def broadband(self, bb_term, params): \"\"\"Compute broadband term.", "self._tracer2['name']] if not ('QSO' in names and 'LYA' in names): raise ValueError('You asked", "self._z_fid, self._Omega_m, self._Omega_de) # Initialize the broadband self.has_bb = False if bb_config is", "\"\"\"Compute correlation function for input P(k). Parameters ---------- pk : ND Array Input", "2014). Parameters ---------- pk : ND Array Input power spectrum PktoXi_obj : vega.PktoXi", "delta_rp rt = self._r * np.sqrt(1 - self._mu**2) r_shift = np.sqrt(rp**2 + rt**2)", "'pre-mul' or 'pre-add' or 'post-mul' or 'post-add' Returns ------- 1d Array Output broadband", "the cross) self._delta_rp_name = None if tracer1['type'] == 'discrete' and tracer2['type'] != 'discrete':", "self._multipole) return xi @staticmethod def _rescale_coords(r, mu, ap, at, delta_rp=0.): \"\"\"Rescale Xi coordinates", "the parameters of this term name = 'BB-{}-{} {} {} {}'.format(config['cf_name'], index, config['type'],", "rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute the correlation function xi_rel", "(Omega_m, Omega_de) for i in range(nbins): a = 1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1,", "broadband in rp,rt for the sky residuals. Parameters ---------- bb_term : dict broadband", "of broadband terms, by default None metal_corr : bool, optional Whether this is", ": dict Computation parameters tracer_name : string Name of tracer Returns ------- ND", "parameters Returns ------- 1D Xi QSO radiation model \"\"\" assert 'QSO' in [self._tracer1['name'],", "[] self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul'] = [] # First pick up the normal", "Returns ------- 1D Xi QSO radiation model \"\"\" assert 'QSO' in [self._tracer1['name'], self._tracer2['name']]", "function. 
This does the Hankel transform of the input P(k), sums the necessary", "Array Output xi asymmetry \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] !=", "if we need delta rp (Only for the cross) self._delta_rp_name = None if", "factor. Implements eq. 7.77 from <NAME>'s Modern Cosmology book. Returns ------- ND Array", "Array Bias evolution for tracer \"\"\" # Compute the bias evolution bias_evol =", "= self._r * self._mu rt = self._r * np.sqrt(1 - self._mu**2) scale =", "------- ND Array Bias evolution for tracer \"\"\" # Compute the bias evolution", "the QSO radiation model xi_rad = strength / (r_shift**2) * (1 - asymmetry", "turn Pk into Xi params : dict Computation parameters Returns ------- 1D Array", "metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute the correlation", "---------- params : dict Computation parameters Returns ------- ND Array Bias evolution for", "in r and mu or rp,rt. Parameters ---------- bb_term : dict broadband term", "= r[mask] * mu[mask] + delta_rp rt = r[mask] * np.sqrt(1 - mu[mask]**2)", "\"\"\"Rescale Xi coordinates using ap/at. Parameters ---------- r : ND array Array of", "handle_name in self._config: evol_model = self._config.get(handle_name, 'standard') else: evol_model = self._config.get('z evol', 'standard')", "zmax, then interpolate nbins = 100 zmax = 5. z = zmax *", "= self._config.getboolean('standard asymmetry') if self.relativistic_flag or self.asymmetry_flag: types = [self._tracer1['type'], self._tracer2['type']] if ('continuous'", "for relativistic effects and standard asymmetry self.relativistic_flag = False if 'relativistic correction' in", "------- 1D Array Output correlation function \"\"\" # Check for delta rp delta_rp", "assert config['rp_rt'] == 'rp,rt' # Create the name for the parameters of this", "params): \"\"\"Compute sky broadband term. 
Calculates a Gaussian broadband in rp,rt for the", "100. * self._mu r2 = self._r / 100. * np.sqrt(1 - self._mu**2) r_min,", "= 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a, args=pars)[0] D1 = interp1d(z, D1) growth = D1(z_grid)", "bb_config if el['func'] != 'broadband_sky'] for index, config in enumerate(normal_broadbands): # Create the", ": float Alpha transverse delta_rp : float, optional Delta radius_parallel - nuisance correction", "* self._mu rt = self._r * np.sqrt(1 - self._mu**2) scale = params[bb_term['name'] +", "the broadband term dictionary bb = {} bb['name'] = name bb['func'] = config['func']", "z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute growth factor. Implements eq. 7.77 from <NAME>'s Modern", "in evol_model: bias_evol = self._bias_evol_croom(params, tracer_name) else: bias_evol = self._bias_evol_std(params, tracer_name) return bias_evol", "= zmax * np.arange(nbins, dtype=float) / (nbins-1) D1 = np.zeros(nbins, dtype=float) pars =", "np.zeros(nbins, dtype=float) pars = (Omega_m, Omega_de) for i in range(nbins): a = 1/(1+z[i])", "w = (rp >= 0.) & (rp < bb_term['bin_size_rp']) corr[~w] = 0. return", "standard model. Parameters ---------- params : dict Computation parameters tracer_name : string Tracer", "rp,rt. Parameters ---------- bb_term : dict broadband term config params : dict Computation", "= 1/a-1 return 1./(a*hubble(z, Omega_m, Omega_de))**3 # Calculate D1 in 100 values of", "= self._r * self._mu + delta_rp rt = self._r * np.sqrt(1 - self._mu**2)", "with position and type, must be one of: 'pre-mul' or 'pre-add' or 'post-mul'", "in range(nbins): a = 1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a, args=pars)[0] D1", "def broadband_sky(self, bb_term, params): \"\"\"Compute sky broadband term. Calculates a Gaussian broadband in", "/ (1. 
+ z_grid) return growth**2 # Compute the growth at each redshift", "correlation, by default False \"\"\" self._config = config self._r = coords_grid['r'] self._mu =", "+= self.compute_qso_radiation(params) # Add relativistic effects if self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj, params)", ": dict Computation parameters pos_type : string String with position and type, must", "broadband terms, by default None metal_corr : bool, optional Whether this is a", "String with position and type, must be one of: 'pre-mul' or 'pre-add' or", ": ND Array Input power spectrum pk_lin : 1D Array Linear isotropic power", "list with configs of broadband terms, by default None metal_corr : bool, optional", "bb_term : dict broadband term config params : dict Computation parameters Returns -------", "---------- bb_config : list list with configs of broadband terms \"\"\" self.bb_terms =", "------- ND Array Bias evolution for tracer \"\"\" assert tracer_name == \"QSO\" p0", "Alpha transverse delta_rp : float, optional Delta radius_parallel - nuisance correction for wrong", "return growth**2 def _init_broadband(self, bb_config): \"\"\"Initialize the broadband terms. Parameters ---------- bb_config :", "1d Array Output broadband \"\"\" assert pos_type in ['pre-mul', 'pre-add', 'post-mul', 'post-add'] corr", "else: corr = 0. 
return corr def broadband_sky(self, bb_term, params): \"\"\"Compute sky broadband", "= PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole) return xi @staticmethod def _rescale_coords(r, mu, ap, at,", "QSO radiation to the cross (the transverse proximity effect) Parameters ---------- params :", "rescaled_mu, pk, params) return xi_rel def compute_xi_asymmetry(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation", "corr = (bb_params[:, :, None, None] * r1**r1_powers[:, None, None] * r2**r2_powers[None, :,", "radius_parallel - nuisance correction for wrong redshift, used for discrete tracers, by default", "evolution bias_evol = self._get_tracer_evol(params, self._tracer1['name']) bias_evol *= self._get_tracer_evol(params, self._tracer2['name']) return bias_evol def _get_tracer_evol(self,", "Check if we have dark energy if Omega_de is None: growth = (1", "asymmetry * (1 - mu_shift**2)) xi_rad *= np.exp(-r_shift * ((1 + mu_shift) /", "None) if not config.getboolean('old_growth_func', False): self.xi_growth = self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de) else: self.xi_growth", "= self._config.get('z evol', 'standard') # Compute the bias evolution using the right model", "pars = (Omega_m, Omega_de) for i in range(nbins): a = 1/(1+z[i]) D1[i] =", "Bias evolution for tracer \"\"\" # Compute the bias evolution bias_evol = self._get_tracer_evol(params,", "params, tracer_name): \"\"\"Bias evolution standard model. 
Parameters ---------- params : dict Computation parameters", "we have dark energy if Omega_de is None: growth = (1 + z_fid)", ": list, optional list with configs of broadband terms, by default None metal_corr", "Whether this is a metal correlation, by default False \"\"\" self._config = config", "for tracer \"\"\" # Compute the bias evolution bias_evol = self._get_tracer_evol(params, self._tracer1['name']) bias_evol", "corr def compute_qso_radiation(self, params): \"\"\"Model the contribution of QSO radiation to the cross", "one of: 'pre-mul' or 'pre-add' or 'post-mul' or 'post-add' Returns ------- 1d Array", "is QSOxLYA # Does this work for the QSO auto as well? self.radiation_flag", "= (1 + z_fid) / (1. + z_grid) return growth**2 # Compute the", "coordinates ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at,", "= (p0 + p1*(1. + self._z)**2) / (p0 + p1 * (1 +", "and 'LYA' in names): raise ValueError('You asked for QSO radiation effects, but it'", "config, fiducial, coords_grid, scale_params, tracer1, tracer2, bb_config=None, metal_corr=False): \"\"\" Parameters ---------- config :", "1D Array Output correlation function \"\"\" # Compute the core xi = self.compute_core(pk,", "r_min, r_max, dr = bb_term['r_config'] mu_min, mu_max, dmu = bb_term['mu_config'] r1_powers = np.arange(r_min,", "dr = bb_term['r_config'] mu_min, mu_max, dmu = bb_term['mu_config'] r1_powers = np.arange(r_min, r_max +", "Name of tracer Returns ------- ND Array Bias evolution for tracer \"\"\" handle_name", "Bias evolution for tracer \"\"\" p0 = params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0 return bias_z", "optional Whether this is a metal correlation, by default False \"\"\" self._config =", "relativistic \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] # Get", "rp/r coords of Xi ap : float 
Alpha parallel at : float Alpha", "init as that is only called once # ! Compute is called many", "xi = self.compute_core(pk, PktoXi_obj, params) # Add bias evolution xi *= self.compute_bias_evol(params) #", "terms sky_broadbands = [el for el in bb_config if el['func'] == 'broadband_sky'] for", "the right pos/type configuration for bb_term in self.bb_terms[pos_type]: # Check if it's sky", "= 1 + corr elif 'mul' in pos_type: corr *= 1 + self.broadband_sky(bb_term,", "xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params) return xi_rel def compute_xi_asymmetry(self, pk, PktoXi_obj, params):", "+ 1, dmu) bb_params = [] for i in r1_powers: for j in", "the form 'compute_extension' that can be called from outside \"\"\" def __init__(self, config,", "of Xi ap : float Alpha parallel at : float Alpha transverse delta_rp", "relativistic effects and standard asymmetry self.relativistic_flag = False if 'relativistic correction' in self._config:", "# ! Compute is called many times and should be fast Extensions should", "return growth**2 def compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): def hubble(z, Omega_m, Omega_de): return", "\"\"\" # Check for delta rp delta_rp = 0. if self._delta_rp_name is not", ": dict Computation parameters Returns ------- 1D Array Output xi asymmetry \"\"\" assert", "assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] # Get rescaled Xi", "[self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name'] != self._tracer2['name'] # Compute the shifted r and mu", "if Omega_de is None: growth = (1 + z_fid) / (1. 
+ z_grid)", "= config['r_config'] bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) # Next pick", "coordinates Parameters ---------- pk : ND Array Input power spectrum PktoXi_obj : vega.PktoXi", "# First pick up the normal broadband terms normal_broadbands = [el for el", "Omega_m, Omega_de) # Scale to the fiducial redshift growth /= utils.growth_function(z_fid, Omega_m, Omega_de)", "p1*(1. + self._z)**2) / (p0 + p1 * (1 + self._z_eff)**2) return bias_z", "corr is None: corr = self.broadband(bb_term, params) if 'mul' in pos_type: corr =", "bb_term, params): \"\"\"Compute sky broadband term. Calculates a Gaussian broadband in rp,rt for", "self._r / 100. r2 = self._mu if bb_term['rp_rt'] == 'rp,rt': r1 = self._r", "= self._Omega_m if Omega_de is None: Omega_de = self._Omega_de # Check if we", "xi_rel def compute_xi_asymmetry(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from standard asymmetry", "compute_broadband(self, params, pos_type): \"\"\"Compute the broadband terms for one position (pre-distortion/post-distortion) and one", "({},{})'.format( bb_term['name'], i, j)]) bb_params = np.array(bb_params).reshape(-1, r_max - r_min + 1) corr", "growth = utils.growth_function(z_grid, Omega_m, Omega_de) # Scale to the fiducial redshift growth /=", "Compute correlation function xi = PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole) return xi @staticmethod def", "self._config: self.radiation_flag = self._config.getboolean('radiation effects') if self.radiation_flag: names = [self._tracer1['name'], self._tracer2['name']] if not", "name Returns ------- ND Array Bias evolution for tracer \"\"\" p0 = params['alpha_{}'.format(tracer_name)]", "= self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute the correlation function xi_asy =", "(1 - asymmetry * (1 - mu_shift**2)) xi_rad *= np.exp(-r_shift * ((1 +", "dark energy if Omega_de is None: growth = (1 + z_fid) / 
(1.", "bb_term['func'] != 'broadband_sky': # Initialize the broadband and check # if we need", "else: bias_evol = self._bias_evol_std(params, tracer_name) return bias_evol def _bias_evol_std(self, params, tracer_name): \"\"\"Bias evolution", "input P(k). Parameters ---------- pk : ND Array Input power spectrum pk_lin :", "(QSOxLya)') # Check for relativistic effects and standard asymmetry self.relativistic_flag = False if", "Add relativistic effects if self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) # Add standard", "ND Array Growth factor \"\"\" # Check the defaults if z_grid is None:", "config['func'] bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) def compute_broadband(self, params, pos_type):", "- self._mu**2) scale = params[bb_term['name'] + '-scale-sky'] sigma = params[bb_term['name'] + '-sigma-sky'] corr", "np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask] = rescaled_rp / rescaled_r[mask] return rescaled_r, rescaled_mu def compute_bias_evol(self,", "= r[mask] * np.sqrt(1 - mu[mask]**2) rescaled_rp = ap * rp rescaled_rt =", "power spectrum pk_lin : 1D Array Linear isotropic power spectrum PktoXi_obj : vega.PktoXi", "params) if 'mul' in pos_type: corr = 1 + corr elif 'mul' in", "= np.arange(mu_min, mu_max + 1, dmu) bb_params = [] for i in r1_powers:", "* r1**r1_powers[:, None, None] * r2**r2_powers[None, :, None]).sum(axis=(0, 1, 2)) return corr def", "0. if self._delta_rp_name is not None: delta_rp = params.get(self._delta_rp_name, 0.) # Get rescaled", "_init_broadband(self, bb_config): \"\"\"Initialize the broadband terms. 
Parameters ---------- bb_config : list list with", "Computation parameters tracer_name : string Name of tracer Returns ------- ND Array Bias", "for the cross') def compute(self, pk, pk_lin, PktoXi_obj, params): \"\"\"Compute correlation function for", "'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands), config['func']) # Create the broadband term dictionary bb =", "the coordinates Parameters ---------- pk : ND Array Input power spectrum PktoXi_obj :", "r, mu, z scale_params : ScaleParameters ScaleParameters object tracer1 : dict Config of", "Computation parameters pos_type : string String with position and type, must be one", "params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime'] decrease = params['qso_rad_decrease'] # Compute the QSO radiation model", "corr = self.broadband(bb_term, params) if 'mul' in pos_type: corr = 1 + corr", "Input power spectrum PktoXi_obj : vega.PktoXi An instance of the transform object used", "rp = r[mask] * mu[mask] + delta_rp rt = r[mask] * np.sqrt(1 -", "Compute the bias evolution bias_evol = self._get_tracer_evol(params, self._tracer1['name']) bias_evol *= self._get_tracer_evol(params, self._tracer2['name']) return", "parameters Returns ------- 1D Array Output correlation function \"\"\" # Check for delta", "= False if 'relativistic correction' in self._config: self.relativistic_flag = self._config.getboolean('relativistic correction') self.asymmetry_flag =", "- asymmetry * (1 - mu_shift**2)) xi_rad *= np.exp(-r_shift * ((1 + mu_shift)", "((1 + mu_shift) / lifetime + 1 / decrease)) return xi_rad def compute_xi_relativistic(self,", "'z evol {}'.format(tracer_name) if handle_name in self._config: evol_model = self._config.get(handle_name, 'standard') else: evol_model", "correlation function. This does the Hankel transform of the input P(k), sums the", "def _bias_evol_std(self, params, tracer_name): \"\"\"Bias evolution standard model. 
Parameters ---------- params : dict", "params[bb_term['name'] + '-sigma-sky'] corr = scale / (sigma * np.sqrt(2. * np.pi)) corr", "np.sqrt(1 - self._mu**2) scale = params[bb_term['name'] + '-scale-sky'] sigma = params[bb_term['name'] + '-sigma-sky']", "standard asymmetry self.relativistic_flag = False if 'relativistic correction' in self._config: self.relativistic_flag = self._config.getboolean('relativistic", ": bool, optional Whether this is a metal correlation, by default False \"\"\"", "QSO radiation modeling and check if it is QSOxLYA # Does this work", "config['rp_rt'] == 'rp,rt' # Create the name for the parameters of this term", "Bias evolution for tracer \"\"\" handle_name = 'z evol {}'.format(tracer_name) if handle_name in", "term config params : dict Computation parameters Returns ------- 1d Array Output broadband", "params): \"\"\"Compute correlation function for input P(k). Parameters ---------- pk : ND Array", "standard asymmetry (Bonvin et al. 2014). Parameters ---------- pk : ND Array Input", "------- ND Array Rescaled radii ND Array Rescaled mu \"\"\" mask = r", "z_fid = self._z_fid if Omega_m is None: Omega_m = self._Omega_m if Omega_de is", "sigma)**2) w = (rp >= 0.) 
& (rp < bb_term['bin_size_rp']) corr[~w] = 0.", "= bb_term['r_config'] mu_min, mu_max, dmu = bb_term['mu_config'] r1_powers = np.arange(r_min, r_max + 1,", "multiply if corr is None: corr = self.broadband_sky(bb_term, params) if 'mul' in pos_type:", "scale_params : ScaleParameters ScaleParameters object tracer1 : dict Config of tracer 1 tracer2", "zmax * np.arange(nbins, dtype=float) / (nbins-1) D1 = np.zeros(nbins, dtype=float) pars = (Omega_m,", "tracer2, bb_config=None, metal_corr=False): \"\"\" Parameters ---------- config : ConfigParser model section of config", "P(k), sums the necessary multipoles and rescales the coordinates Parameters ---------- pk :", "self.broadband(bb_term, params) else: # Initialize the broadband and check # if we need", "\"\"\" mask = r != 0 rp = r[mask] * mu[mask] + delta_rp", "tracers, by default 0. Returns ------- ND Array Rescaled radii ND Array Rescaled", "correlation function \"\"\" # Check for delta rp delta_rp = 0. if self._delta_rp_name", "dict Computation parameters Returns ------- 1d Array Output broadband \"\"\" rp = self._r", "None metal_corr : bool, optional Whether this is a metal correlation, by default", "radiation to the cross (the transverse proximity effect) Parameters ---------- params : dict", "rp / r_shift # Get the QSO radiation model parameters strength = params['qso_rad_strength']", "Array Rescaled mu \"\"\" mask = r != 0 rp = r[mask] *", "shifted r and mu grids delta_rp = params.get(self._delta_rp_name, 0.) rp = self._r *", "2 bb_config : list, optional list with configs of broadband terms, by default", "should be fast Extensions should have their separate method of the form 'compute_extension'", "transform of the input P(k), sums the necessary multipoles and rescales the coordinates", "Xi params : dict Computation parameters Returns ------- 1D Array Output xi asymmetry", "mask = r != 0 rp = r[mask] * mu[mask] + delta_rp rt", "once # ! 
Compute is called many times and should be fast Extensions", "coords_grid['z'] self._multipole = config.getint('single_multipole', -1) self._tracer1 = tracer1 self._tracer2 = tracer2 self._z_eff =", "terms \"\"\" self.bb_terms = {} self.bb_terms['pre-add'] = [] self.bb_terms['post-add'] = [] self.bb_terms['pre-mul'] =", "asymmetry') if self.relativistic_flag or self.asymmetry_flag: types = [self._tracer1['type'], self._tracer2['type']] if ('continuous' not in", "1D Array Output xi asymmetry \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type']", "pk : ND Array Input power spectrum pk_lin : 1D Array Linear isotropic", "Xi mu : ND array Array of mu = rp/r coords of Xi", "with configs of broadband terms \"\"\" self.bb_terms = {} self.bb_terms['pre-add'] = [] self.bb_terms['post-add']", "the sky residuals. Parameters ---------- bb_term : dict broadband term config params :", "/ (nbins-1) D1 = np.zeros(nbins, dtype=float) pars = (Omega_m, Omega_de) for i in", "= 0. return corr def broadband_sky(self, bb_term, params): \"\"\"Compute sky broadband term. Calculates", "= coords_grid['z'] self._multipole = config.getint('single_multipole', -1) self._tracer1 = tracer1 self._tracer2 = tracer2 self._z_eff", "2005. 
Parameters ---------- params : dict Computation parameters tracer_name : string Tracer name", "None: Omega_m = self._Omega_m if Omega_de is None: Omega_de = self._Omega_de # Check", "fiducial config coords_grid : dict Dictionary with coordinate grid - r, mu, z", "self._config.getboolean('relativistic correction') self.asymmetry_flag = False if 'standard asymmetry' in self._config: self.asymmetry_flag = self._config.getboolean('standard", "+= self.broadband_sky(bb_term, params) # Give defaults if corr is still None if corr", "= True # Check for QSO radiation modeling and check if it is", "raise ValueError('You asked for relativistic effects or standard asymmetry,' ' but they only", "PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params) return xi_rel def compute_xi_asymmetry(self, pk, PktoXi_obj, params): \"\"\"Calculate the", "their separate method of the form 'compute_extension' that can be called from outside", "def _rescale_coords(r, mu, ap, at, delta_rp=0.): \"\"\"Rescale Xi coordinates using ap/at. Parameters ----------", "sigma = params[bb_term['name'] + '-sigma-sky'] corr = scale / (sigma * np.sqrt(2. *", "effects (Bonvin et al. 2014). Parameters ---------- pk : ND Array Input power", "self._r / 100. * np.sqrt(1 - self._mu**2) r_min, r_max, dr = bb_term['r_config'] mu_min,", "z_grid is None: z_grid = self._z if z_fid is None: z_fid = self._z_fid", "self._rel_z_evol**p0 return bias_z def _bias_evol_croom(self, params, tracer_name): \"\"\"Bias evolution Croom model for QSO,", "(the transverse proximity effect) Parameters ---------- params : dict Computation parameters Returns -------", "tracer2['name'] # Precompute growth self._z_fid = fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m', None) self._Omega_de =", "multipoles and rescales the coordinates Parameters ---------- pk : ND Array Input power", "and mu or rp,rt. 
Parameters ---------- bb_term : dict broadband term config params", "['pre-mul', 'pre-add', 'post-mul', 'post-add'] corr = None # Loop over the right pos/type", "dmu = bb_term['mu_config'] r1_powers = np.arange(r_min, r_max + 1, dr) r2_powers = np.arange(mu_min,", "as np from scipy.integrate import quad from scipy.interpolate import interp1d from . import", "# Compute correlation function xi = PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole) return xi @staticmethod", "ap, at, delta_rp=0.): \"\"\"Rescale Xi coordinates using ap/at. Parameters ---------- r : ND", "np.zeros(len(r)) rescaled_mu = np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask] = rescaled_rp /", "broadband terms normal_broadbands = [el for el in bb_config if el['func'] != 'broadband_sky']", "the cross-correlation contribution from relativistic effects (Bonvin et al. 2014). Parameters ---------- pk", "redshift growth /= utils.growth_function(z_fid, Omega_m, Omega_de) return growth**2 def compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None,", "self._z) / (1 + self._z_eff) self._scale_params = scale_params self._metal_corr = metal_corr # Check", "dtype=float) pars = (Omega_m, Omega_de) for i in range(nbins): a = 1/(1+z[i]) D1[i]", "called many times and should be fast Extensions should have their separate method", "Array Output broadband \"\"\" assert pos_type in ['pre-mul', 'pre-add', 'post-mul', 'post-add'] corr =", "tracer_name) return bias_evol def _bias_evol_std(self, params, tracer_name): \"\"\"Bias evolution standard model. 
Parameters ----------", "if it is QSOxLYA # Does this work for the QSO auto as", "type, must be one of: 'pre-mul' or 'pre-add' or 'post-mul' or 'post-add' Returns", "Compute is called many times and should be fast Extensions should have their", "\"\"\" handle_name = 'z evol {}'.format(tracer_name) if handle_name in self._config: evol_model = self._config.get(handle_name,", "dict Computation parameters tracer_name : string Name of tracer Returns ------- ND Array", ": dict Config of tracer 2 bb_config : list, optional list with configs", "grid - r, mu, z scale_params : ScaleParameters ScaleParameters object tracer1 : dict", "else: # Initialize the broadband and check # if we need to add", "False if bb_config is not None: self._init_broadband(bb_config) self.has_bb = True # Check for", "effects' in self._config: self.radiation_flag = self._config.getboolean('radiation effects') if self.radiation_flag: names = [self._tracer1['name'], self._tracer2['name']]", "'mul' in pos_type: corr = 1 + corr elif 'mul' in pos_type: corr", "# Compute the bias evolution bias_evol = self._get_tracer_evol(params, self._tracer1['name']) bias_evol *= self._get_tracer_evol(params, self._tracer2['name'])", "config['rp_rt']) # Create the broadband term dictionary bb = {} bb['name'] = name", "tracer_name : string Name of tracer Returns ------- ND Array Bias evolution for", "self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute the correlation function xi_asy = PktoXi_obj.pk_to_xi_asymmetry(rescaled_r,", "evolution using the right model if 'croom' in evol_model: bias_evol = self._bias_evol_croom(params, tracer_name)", "Dictionary with coordinate grid - r, mu, z scale_params : ScaleParameters ScaleParameters object", "bb['func'] = config['func'] bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) def compute_broadband(self,", "Returns ------- 1d Array Output broadband \"\"\" rp = self._r * self._mu rt", "\"\"\" # Check the 
defaults if z_grid is None: z_grid = self._z if", "this term name = 'BB-{}-{} {} {} {}'.format(config['cf_name'], index, config['type'], config['pre'], config['rp_rt']) #", "broadban terms sky_broadbands = [el for el in bb_config if el['func'] == 'broadband_sky']", ": ND Array Input power spectrum PktoXi_obj : vega.PktoXi An instance of the", "bias_z = (p0 + p1*(1. + self._z)**2) / (p0 + p1 * (1", "xi *= self.compute_bias_evol(params) # Add growth xi *= self.xi_growth # Add QSO radiation", "tracer \"\"\" assert tracer_name == \"QSO\" p0 = params[\"croom_par0\"] p1 = params[\"croom_par1\"] bias_z", "name for the parameters of this term name = 'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands),", "* ((1 + mu_shift) / lifetime + 1 / decrease)) return xi_rad def", "Computation parameters Returns ------- 1D Xi QSO radiation model \"\"\" assert 'QSO' in", "PktoXi_obj, params) # Add standard asymmetry if self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params)", "Omega_m=None, Omega_de=None): \"\"\"Compute growth factor. Implements eq. 7.77 from <NAME>'s Modern Cosmology book.", "def dD1(a, Omega_m, Omega_de): z = 1/a-1 return 1./(a*hubble(z, Omega_m, Omega_de))**3 # Calculate", "mu_shift = rp / r_shift # Get the QSO radiation model parameters strength", "z between 0 and zmax, then interpolate nbins = 100 zmax = 5.", "Input power spectrum pk_lin : 1D Array Linear isotropic power spectrum PktoXi_obj :", "self.radiation_flag and not params['peak']: xi += self.compute_qso_radiation(params) # Add relativistic effects if self.relativistic_flag:", "for tracer \"\"\" handle_name = 'z evol {}'.format(tracer_name) if handle_name in self._config: evol_model", "Parameters ---------- params : dict Computation parameters tracer_name : string Tracer name Returns", "in init as that is only called once # ! 
Compute is called", "== \"QSO\" p0 = params[\"croom_par0\"] p1 = params[\"croom_par1\"] bias_z = (p0 + p1*(1.", "bias_evol *= self._get_tracer_evol(params, self._tracer2['name']) return bias_evol def _get_tracer_evol(self, params, tracer_name): \"\"\"Compute tracer bias", "kept in init as that is only called once # ! Compute is", "Parameters ---------- bb_config : list list with configs of broadband terms \"\"\" self.bb_terms", "string String with position and type, must be one of: 'pre-mul' or 'pre-add'", "p1 = params[\"croom_par1\"] bias_z = (p0 + p1*(1. + self._z)**2) / (p0 +", "\"\"\" # Compute the bias evolution bias_evol = self._get_tracer_evol(params, self._tracer1['name']) bias_evol *= self._get_tracer_evol(params,", "self.has_bb = True # Check for QSO radiation modeling and check if it", "= rescaled_rp / rescaled_r[mask] return rescaled_r, rescaled_mu def compute_bias_evol(self, params): \"\"\"Compute bias evolution", "and type, must be one of: 'pre-mul' or 'pre-add' or 'post-mul' or 'post-add'", "configuration for bb_term in self.bb_terms[pos_type]: # Check if it's sky or normal broadband", "used for discrete tracers, by default 0. Returns ------- ND Array Rescaled radii", "growth at each redshift on the grid growth = utils.growth_function(z_grid, Omega_m, Omega_de) #", "in pos_type: corr *= 1 + self.broadband_sky(bb_term, params) else: corr += self.broadband_sky(bb_term, params)", "radiation modeling for cross if self.radiation_flag and not params['peak']: xi += self.compute_qso_radiation(params) #", "standard asymmetry if self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return xi def compute_core(self,", "if self._delta_rp_name is not None: delta_rp = params.get(self._delta_rp_name, 0.) # Get rescaled Xi", "form 'compute_extension' that can be called from outside \"\"\" def __init__(self, config, fiducial,", "from <NAME>'s Modern Cosmology book. 
Returns ------- ND Array Growth factor \"\"\" #", "Array Rescaled radii ND Array Rescaled mu \"\"\" mask = r != 0", "+ self._z_eff) self._scale_params = scale_params self._metal_corr = metal_corr # Check if we need", "+ tracer1['name'] elif tracer2['type'] == 'discrete' and tracer1['type'] != 'discrete': self._delta_rp_name = 'drp_'", "for el in bb_config if el['func'] == 'broadband_sky'] for index, config in enumerate(sky_broadbands):", "+ self._z_eff)**2) return bias_z def compute_growth(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute growth factor.", "is still None if corr is None: if 'mul' in pos_type: corr =", "bias_z = self._rel_z_evol**p0 return bias_z def _bias_evol_croom(self, params, tracer_name): \"\"\"Bias evolution Croom model", "correction') self.asymmetry_flag = False if 'standard asymmetry' in self._config: self.asymmetry_flag = self._config.getboolean('standard asymmetry')", "params : dict Computation parameters Returns ------- 1D Array Output xi relativistic \"\"\"", "metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute correlation function", "sky or normal broadband if bb_term['func'] != 'broadband_sky': # Initialize the broadband and", "np.sqrt(1 - mu[mask]**2) rescaled_rp = ap * rp rescaled_rt = at * rt", "up the sky broadban terms sky_broadbands = [el for el in bb_config if", "self.xi_growth = self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de) # Initialize the broadband self.has_bb = False", "1d Array Output broadband \"\"\" rp = self._r * self._mu rt = self._r", "if 'mul' in pos_type: corr = 1. else: corr = 0. return corr", "term name = 'BB-{}-{} {} {} {}'.format(config['cf_name'], index, config['type'], config['pre'], config['rp_rt']) # Create", "self._bias_evol_std(params, tracer_name) return bias_evol def _bias_evol_std(self, params, tracer_name): \"\"\"Bias evolution standard model. 
Parameters", "(r_shift**2) * (1 - asymmetry * (1 - mu_shift**2)) xi_rad *= np.exp(-r_shift *", "delta_rp = 0. if self._delta_rp_name is not None: delta_rp = params.get(self._delta_rp_name, 0.) #", "broadband in r and mu or rp,rt. Parameters ---------- bb_term : dict broadband", "ND Array Bias evolution for tracer \"\"\" # Compute the bias evolution bias_evol", "QSO, see Croom et al. 2005. Parameters ---------- params : dict Computation parameters", "'pre-add', 'post-mul', 'post-add'] corr = None # Loop over the right pos/type configuration", "strength = params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime'] decrease = params['qso_rad_decrease'] #", "(pre-distortion/post-distortion) and one type (multiplicative/additive). Parameters ---------- params : dict Computation parameters pos_type", "values of z between 0 and zmax, then interpolate nbins = 100 zmax", "2)) return corr def compute_qso_radiation(self, params): \"\"\"Model the contribution of QSO radiation to", "self._tracer2['type']] if ('continuous' not in types) or (types[0] == types[1]): raise ValueError('You asked", "= None if tracer1['type'] == 'discrete' and tracer2['type'] != 'discrete': self._delta_rp_name = 'drp_'", "config['mu_config'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) # Next pick up the sky broadban", "tracer1, tracer2, bb_config=None, metal_corr=False): \"\"\" Parameters ---------- config : ConfigParser model section of", "Computation parameters Returns ------- 1D Array Output correlation function \"\"\" # Compute the", "1d Array Output broadband \"\"\" r1 = self._r / 100. r2 = self._mu", "Omega_de is None: Omega_de = self._Omega_de # Check if we have dark energy", "(rp < bb_term['bin_size_rp']) corr[~w] = 0. 
return corr def broadband(self, bb_term, params): \"\"\"Compute", "if el['func'] != 'broadband_sky'] for index, config in enumerate(normal_broadbands): # Create the name", "pk, params) return xi_rel def compute_xi_asymmetry(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution", "self._mu**2) r_shift = np.sqrt(rp**2 + rt**2) mu_shift = rp / r_shift # Get", "+ rt**2) mu_shift = rp / r_shift # Get the QSO radiation model", "pos_type: corr *= 1 + self.broadband_sky(bb_term, params) else: corr += self.broadband_sky(bb_term, params) #", "= params.get(self._delta_rp_name, 0.) rp = self._r * self._mu + delta_rp rt = self._r", "compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): def hubble(z, Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3 + Omega_de", "self._tracer1['name'] != self._tracer2['name'] # Compute the shifted r and mu grids delta_rp =", "that can be called from outside \"\"\" def __init__(self, config, fiducial, coords_grid, scale_params,", "+ corr elif 'mul' in pos_type: corr *= 1 + self.broadband(bb_term, params) else:", "of the form 'compute_extension' that can be called from outside \"\"\" def __init__(self,", "default 0. Returns ------- ND Array Rescaled radii ND Array Rescaled mu \"\"\"", "params) return xi_rel def compute_xi_asymmetry(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from", "in names and 'LYA' in names): raise ValueError('You asked for QSO radiation effects,", "(1. + self._z) / (1 + self._z_eff) self._scale_params = scale_params self._metal_corr = metal_corr", "pk, pk_lin, PktoXi_obj, params): \"\"\"Compute correlation function for input P(k). 
Parameters ---------- pk", "/ r_shift # Get the QSO radiation model parameters strength = params['qso_rad_strength'] asymmetry", "None if tracer1['type'] == 'discrete' and tracer2['type'] != 'discrete': self._delta_rp_name = 'drp_' +", "---------- params : dict Computation parameters tracer_name : string Tracer name Returns -------", "bias_evol def _bias_evol_std(self, params, tracer_name): \"\"\"Bias evolution standard model. Parameters ---------- params :", "params[bb_term['name'] + '-scale-sky'] sigma = params[bb_term['name'] + '-sigma-sky'] corr = scale / (sigma", "* r2**r2_powers[None, :, None]).sum(axis=(0, 1, 2)) return corr def compute_qso_radiation(self, params): \"\"\"Model the", "0.) & (rp < bb_term['bin_size_rp']) corr[~w] = 0. return corr def broadband(self, bb_term,", ": dict Config of tracer 1 tracer2 : dict Config of tracer 2", "params : dict Computation parameters Returns ------- 1d Array Output broadband \"\"\" r1", "None if corr is None: if 'mul' in pos_type: corr = 1. else:", "(sigma * np.sqrt(2. * np.pi)) corr *= np.exp(-0.5 * (rt / sigma)**2) w", "assert pos_type in ['pre-mul', 'pre-add', 'post-mul', 'post-add'] corr = None # Loop over", "delta_rp = params.get(self._delta_rp_name, 0.) # Get rescaled Xi coordinates ap, at = self._scale_params.get_ap_at(params,", "r[mask] * mu[mask] + delta_rp rt = r[mask] * np.sqrt(1 - mu[mask]**2) rescaled_rp", "cross (the transverse proximity effect) Parameters ---------- params : dict Computation parameters Returns", "mu, z scale_params : ScaleParameters ScaleParameters object tracer1 : dict Config of tracer", "bb_term['mu_config'] r1_powers = np.arange(r_min, r_max + 1, dr) r2_powers = np.arange(mu_min, mu_max +", "/ rescaled_r[mask] return rescaled_r, rescaled_mu def compute_bias_evol(self, params): \"\"\"Compute bias evolution for the", "parameters tracer_name : string Name of tracer Returns ------- ND Array Bias evolution", "& (rp < bb_term['bin_size_rp']) corr[~w] = 0. 
return corr def broadband(self, bb_term, params):", "cross) self._delta_rp_name = None if tracer1['type'] == 'discrete' and tracer2['type'] != 'discrete': self._delta_rp_name", "self._mu = coords_grid['mu'] self._z = coords_grid['z'] self._multipole = config.getint('single_multipole', -1) self._tracer1 = tracer1", "mu : ND array Array of mu = rp/r coords of Xi ap", "# Add growth xi *= self.xi_growth # Add QSO radiation modeling for cross", "r != 0 rp = r[mask] * mu[mask] + delta_rp rt = r[mask]", "self._r * self._mu rt = self._r * np.sqrt(1 - self._mu**2) scale = params[bb_term['name']", "in r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'], i, j)]) bb_params = np.array(bb_params).reshape(-1, r_max - r_min", "spectrum PktoXi_obj : vega.PktoXi An instance of the transform object used to turn", "self._rel_z_evol = (1. + self._z) / (1 + self._z_eff) self._scale_params = scale_params self._metal_corr", "we need delta rp (Only for the cross) self._delta_rp_name = None if tracer1['type']", "Returns ------- ND Array Bias evolution for tracer \"\"\" assert tracer_name == \"QSO\"", "mu_max + 1, dmu) bb_params = [] for i in r1_powers: for j", "self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute the correlation function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r,", "False if 'radiation effects' in self._config: self.radiation_flag = self._config.getboolean('radiation effects') if self.radiation_flag: names", "\"\"\" def __init__(self, config, fiducial, coords_grid, scale_params, tracer1, tracer2, bb_config=None, metal_corr=False): \"\"\" Parameters", "evolution standard model. Parameters ---------- params : dict Computation parameters tracer_name : string", "* np.sqrt(2. 
* np.pi)) corr *= np.exp(-0.5 * (rt / sigma)**2) w =", "= at * rt rescaled_r = np.zeros(len(r)) rescaled_mu = np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2", "'mul' in pos_type: corr *= 1 + self.broadband(bb_term, params) else: corr += self.broadband(bb_term,", "= 'BB-{}-{}-{}'.format(config['cf_name'], index + len(normal_broadbands), config['func']) # Create the broadband term dictionary bb", "Config of tracer 1 tracer2 : dict Config of tracer 2 bb_config :", "*= np.exp(-0.5 * (rt / sigma)**2) w = (rp >= 0.) & (rp", "ND Array Bias evolution for tracer \"\"\" p0 = params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0", "for QSO, see Croom et al. 2005. Parameters ---------- params : dict Computation", "names = [self._tracer1['name'], self._tracer2['name']] if not ('QSO' in names and 'LYA' in names):", ": ConfigParser model section of config file fiducial : dict fiducial config coords_grid", "decrease)) return xi_rad def compute_xi_relativistic(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from", ": vega.PktoXi An instance of the transform object used to turn Pk into", "Gaussian broadband in rp,rt for the sky residuals. Parameters ---------- bb_term : dict", "== 'discrete' and tracer1['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer2['name'] # Precompute", "\"\"\" assert tracer_name == \"QSO\" p0 = params[\"croom_par0\"] p1 = params[\"croom_par1\"] bias_z =", "\"\"\"Bias evolution standard model. 
Parameters ---------- params : dict Computation parameters tracer_name :", "def _bias_evol_croom(self, params, tracer_name): \"\"\"Bias evolution Croom model for QSO, see Croom et", "names and 'LYA' in names): raise ValueError('You asked for QSO radiation effects, but", "if 'radiation effects' in self._config: self.radiation_flag = self._config.getboolean('radiation effects') if self.radiation_flag: names =", "(1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m, Omega_de): z = 1/a-1 return 1./(a*hubble(z, Omega_m, Omega_de))**3 #", "= self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de) # Initialize the broadband self.has_bb = False if", "= np.array(bb_params).reshape(-1, r_max - r_min + 1) corr = (bb_params[:, :, None, None]", "r_shift # Get the QSO radiation model parameters strength = params['qso_rad_strength'] asymmetry =", "tracer_name == \"QSO\" p0 = params[\"croom_par0\"] p1 = params[\"croom_par1\"] bias_z = (p0 +", "/ 100. * self._mu r2 = self._r / 100. * np.sqrt(1 - self._mu**2)", "bb_config if el['func'] == 'broadband_sky'] for index, config in enumerate(sky_broadbands): assert config['rp_rt'] ==", "100. r2 = self._mu if bb_term['rp_rt'] == 'rp,rt': r1 = self._r / 100.", "\"\"\" assert 'QSO' in [self._tracer1['name'], self._tracer2['name']] assert self._tracer1['name'] != self._tracer2['name'] # Compute the", "Output correlation function \"\"\" # Check for delta rp delta_rp = 0. if", "r1**r1_powers[:, None, None] * r2**r2_powers[None, :, None]).sum(axis=(0, 1, 2)) return corr def compute_qso_radiation(self,", "params.get(self._delta_rp_name, 0.) 
# Get rescaled Xi coordinates ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r,", "---------- params : dict Computation parameters Returns ------- 1D Xi QSO radiation model", "coords of Xi mu : ND array Array of mu = rp/r coords", "Alpha parallel at : float Alpha transverse delta_rp : float, optional Delta radius_parallel", "# Check if we have dark energy if Omega_de is None: growth =", "= params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0 return bias_z def _bias_evol_croom(self, params, tracer_name): \"\"\"Bias evolution", "method of the form 'compute_extension' that can be called from outside \"\"\" def", "self._multipole = config.getint('single_multipole', -1) self._tracer1 = tracer1 self._tracer2 = tracer2 self._z_eff = fiducial['z_eff']", "for cross if self.radiation_flag and not params['peak']: xi += self.compute_qso_radiation(params) # Add relativistic", "dict Computation parameters Returns ------- ND Array Bias evolution for tracer \"\"\" #", "= 5. z = zmax * np.arange(nbins, dtype=float) / (nbins-1) D1 = np.zeros(nbins,", "params) else: corr += self.broadband(bb_term, params) else: # Initialize the broadband and check", "Parameters ---------- params : dict Computation parameters Returns ------- 1D Xi QSO radiation", "Parameters ---------- pk : ND Array Input power spectrum pk_lin : 1D Array", "\"\"\" r1 = self._r / 100. 
r2 = self._mu if bb_term['rp_rt'] == 'rp,rt':", "should have their separate method of the form 'compute_extension' that can be called", "= False if 'radiation effects' in self._config: self.radiation_flag = self._config.getboolean('radiation effects') if self.radiation_flag:", "i in range(nbins): a = 1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a, args=pars)[0]", "default False \"\"\" self._config = config self._r = coords_grid['r'] self._mu = coords_grid['mu'] self._z", "Output xi relativistic \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type']", "params) else: corr += self.broadband_sky(bb_term, params) # Give defaults if corr is still", "and not params['peak']: xi += self.compute_qso_radiation(params) # Add relativistic effects if self.relativistic_flag: xi", "*= self.xi_growth # Add QSO radiation modeling for cross if self.radiation_flag and not", "np.arange(r_min, r_max + 1, dr) r2_powers = np.arange(mu_min, mu_max + 1, dmu) bb_params", "contribution from relativistic effects (Bonvin et al. 2014). Parameters ---------- pk : ND", "ap, at, delta_rp) # Compute the correlation function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk,", "# Give defaults if corr is still None if corr is None: if", "sky broadban terms sky_broadbands = [el for el in bb_config if el['func'] ==", "self._mu, ap, at, delta_rp) # Compute correlation function xi = PktoXi_obj.compute(rescaled_r, rescaled_mu, pk,", "assert tracer_name == \"QSO\" p0 = params[\"croom_par0\"] p1 = params[\"croom_par1\"] bias_z = (p0", "= config.getint('single_multipole', -1) self._tracer1 = tracer1 self._tracer2 = tracer2 self._z_eff = fiducial['z_eff'] self._rel_z_evol", "interpolate nbins = 100 zmax = 5. 
z = zmax * np.arange(nbins, dtype=float)", "of broadband terms \"\"\" self.bb_terms = {} self.bb_terms['pre-add'] = [] self.bb_terms['post-add'] = []", "Omega_de) return growth**2 def compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): def hubble(z, Omega_m, Omega_de):", "asymmetry (Bonvin et al. 2014). Parameters ---------- pk : ND Array Input power", "metal_corr=False): \"\"\" Parameters ---------- config : ConfigParser model section of config file fiducial", "(bb_params[:, :, None, None] * r1**r1_powers[:, None, None] * r2**r2_powers[None, :, None]).sum(axis=(0, 1,", "{} bb['name'] = name bb['func'] = config['func'] bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\"", "be called from outside \"\"\" def __init__(self, config, fiducial, coords_grid, scale_params, tracer1, tracer2,", ": dict Computation parameters Returns ------- ND Array Bias evolution for tracer \"\"\"", "ScaleParameters ScaleParameters object tracer1 : dict Config of tracer 1 tracer2 : dict", "Omega_de=None): \"\"\"Compute growth factor. Implements eq. 7.77 from <NAME>'s Modern Cosmology book. Returns", "+ p1 * (1 + self._z_eff)**2) return bias_z def compute_growth(self, z_grid=None, z_fid=None, Omega_m=None,", "parameters Returns ------- 1d Array Output broadband \"\"\" rp = self._r * self._mu", "[self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] # Get rescaled Xi coordinates delta_rp =", "self._Omega_de) # Initialize the broadband self.has_bb = False if bb_config is not None:", "(multiplicative/additive). 
Parameters ---------- params : dict Computation parameters pos_type : string String with", "transverse proximity effect) Parameters ---------- params : dict Computation parameters Returns ------- 1D", "= 'z evol {}'.format(tracer_name) if handle_name in self._config: evol_model = self._config.get(handle_name, 'standard') else:", "broadband terms for one position (pre-distortion/post-distortion) and one type (multiplicative/additive). Parameters ---------- params", "dict Computation parameters tracer_name : string Tracer name Returns ------- ND Array Bias", "radiation model parameters strength = params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime'] decrease", "= 1. else: corr = 0. return corr def broadband_sky(self, bb_term, params): \"\"\"Compute", "to the cross (the transverse proximity effect) Parameters ---------- params : dict Computation", "necessary multipoles and rescales the coordinates Parameters ---------- pk : ND Array Input", "Next pick up the sky broadban terms sky_broadbands = [el for el in", "self._tracer2['name']) return bias_evol def _get_tracer_evol(self, params, tracer_name): \"\"\"Compute tracer bias evolution. Parameters ----------", "self._mu if bb_term['rp_rt'] == 'rp,rt': r1 = self._r / 100. * self._mu r2", "mu or rp,rt. 
Parameters ---------- bb_term : dict broadband term config params :", "compute_qso_radiation(self, params): \"\"\"Model the contribution of QSO radiation to the cross (the transverse", "in self._config: self.asymmetry_flag = self._config.getboolean('standard asymmetry') if self.relativistic_flag or self.asymmetry_flag: types = [self._tracer1['type'],", "power spectrum PktoXi_obj : vega.PktoXi An instance of the transform object used to", "else: evol_model = self._config.get('z evol', 'standard') # Compute the bias evolution using the", "Bias evolution for tracer \"\"\" assert tracer_name == \"QSO\" p0 = params[\"croom_par0\"] p1", "by default False \"\"\" self._config = config self._r = coords_grid['r'] self._mu = coords_grid['mu']", "rescaled_rp / rescaled_r[mask] return rescaled_r, rescaled_mu def compute_bias_evol(self, params): \"\"\"Compute bias evolution for", "bias evolution bias_evol = self._get_tracer_evol(params, self._tracer1['name']) bias_evol *= self._get_tracer_evol(params, self._tracer2['name']) return bias_evol def", "if corr is None: if 'mul' in pos_type: corr = 1. else: corr", "but it' ' can only be applied to the cross (QSOxLya)') # Check", "!= self._tracer2['name'] # Compute the shifted r and mu grids delta_rp = params.get(self._delta_rp_name,", "# Compute the correlation function xi_asy = PktoXi_obj.pk_to_xi_asymmetry(rescaled_r, rescaled_mu, pk, params) return xi_asy", "= 1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a, args=pars)[0] D1 = interp1d(z, D1)", "model parameters strength = params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime'] decrease =", "rt**2) mu_shift = rp / r_shift # Get the QSO radiation model parameters", "the input P(k), sums the necessary multipoles and rescales the coordinates Parameters ----------", "ap : float Alpha parallel at : float Alpha transverse delta_rp : float,", "from . 
import utils class CorrelationFunction: \"\"\"Correlation function computation and handling. # !", "# Check if it's sky or normal broadband if bb_term['func'] != 'broadband_sky': #", "(Only for the cross) self._delta_rp_name = None if tracer1['type'] == 'discrete' and tracer2['type']", "if not ('QSO' in names and 'LYA' in names): raise ValueError('You asked for", "parameters tracer_name : string Tracer name Returns ------- ND Array Bias evolution for", "effects, but it' ' can only be applied to the cross (QSOxLya)') #", "name for the parameters of this term name = 'BB-{}-{} {} {} {}'.format(config['cf_name'],", "Does this work for the QSO auto as well? self.radiation_flag = False if", "self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] # Get rescaled Xi coordinates delta_rp = params.get(self._delta_rp_name,", "False if 'relativistic correction' in self._config: self.relativistic_flag = self._config.getboolean('relativistic correction') self.asymmetry_flag = False", "* mu[mask] + delta_rp rt = r[mask] * np.sqrt(1 - mu[mask]**2) rescaled_rp =", "PktoXi_obj, params): \"\"\"Compute correlation function for input P(k). Parameters ---------- pk : ND", "parameters Returns ------- 1d Array Output broadband \"\"\" r1 = self._r / 100.", "lifetime + 1 / decrease)) return xi_rad def compute_xi_relativistic(self, pk, PktoXi_obj, params): \"\"\"Calculate", ": string Name of tracer Returns ------- ND Array Bias evolution for tracer", "Calculates a power-law broadband in r and mu or rp,rt. 
Parameters ---------- bb_term", "ValueError('You asked for QSO radiation effects, but it' ' can only be applied", "/= utils.growth_function(z_fid, Omega_m, Omega_de) return growth**2 def compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): def", "file fiducial : dict fiducial config coords_grid : dict Dictionary with coordinate grid", "xi def compute_core(self, pk, PktoXi_obj, params): \"\"\"Compute the core of the correlation function.", "rt rescaled_r = np.zeros(len(r)) rescaled_mu = np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2 + rescaled_rt**2) rescaled_mu[mask]", "dictionary bb = {} bb['name'] = name bb['func'] = config['func'] bb['rp_rt'] = config['rp_rt']", "self._r * self._mu + delta_rp rt = self._r * np.sqrt(1 - self._mu**2) r_shift", "== types[1]): raise ValueError('You asked for relativistic effects or standard asymmetry,' ' but", "------- ND Array Bias evolution for tracer \"\"\" p0 = params['alpha_{}'.format(tracer_name)] bias_z =", "vega.PktoXi An instance of the transform object used to turn Pk into Xi", "if self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) # Add standard asymmetry if self.asymmetry_flag:", "the growth at each redshift on the grid growth = utils.growth_function(z_grid, Omega_m, Omega_de)", "params[\"croom_par1\"] bias_z = (p0 + p1*(1. + self._z)**2) / (p0 + p1 *", "'mul' in pos_type: corr = 1. else: corr = 0. return corr def", "# Compute the growth at each redshift on the grid growth = utils.growth_function(z_grid,", "from relativistic effects (Bonvin et al. 2014). 
Parameters ---------- pk : ND Array", "= rp / r_shift # Get the QSO radiation model parameters strength =", "self._z_fid if Omega_m is None: Omega_m = self._Omega_m if Omega_de is None: Omega_de", "evolution for tracer \"\"\" # Compute the bias evolution bias_evol = self._get_tracer_evol(params, self._tracer1['name'])", "+ config['type']].append(bb) def compute_broadband(self, params, pos_type): \"\"\"Compute the broadband terms for one position", "Give defaults if corr is still None if corr is None: if 'mul'", "= scale / (sigma * np.sqrt(2. * np.pi)) corr *= np.exp(-0.5 * (rt", "dict Computation parameters Returns ------- 1d Array Output broadband \"\"\" r1 = self._r", "model section of config file fiducial : dict fiducial config coords_grid : dict", "Returns ------- ND Array Growth factor \"\"\" # Check the defaults if z_grid", "the broadband terms. Parameters ---------- bb_config : list list with configs of broadband", "+ '-scale-sky'] sigma = params[bb_term['name'] + '-sigma-sky'] corr = scale / (sigma *", "corr def broadband_sky(self, bb_term, params): \"\"\"Compute sky broadband term. 
Calculates a Gaussian broadband", "does the Hankel transform of the input P(k), sums the necessary multipoles and", "config in enumerate(normal_broadbands): # Create the name for the parameters of this term", "---------- params : dict Computation parameters pos_type : string String with position and", "*pars)*quad(dD1, 0, a, args=pars)[0] D1 = interp1d(z, D1) growth = D1(z_grid) / D1(z_fid)", "Computation parameters Returns ------- 1d Array Output broadband \"\"\" r1 = self._r /", "= [self._tracer1['type'], self._tracer2['type']] if ('continuous' not in types) or (types[0] == types[1]): raise", "delta_rp rt = r[mask] * np.sqrt(1 - mu[mask]**2) rescaled_rp = ap * rp", "config params : dict Computation parameters Returns ------- 1d Array Output broadband \"\"\"", "for tracer \"\"\" assert tracer_name == \"QSO\" p0 = params[\"croom_par0\"] p1 = params[\"croom_par1\"]", "* np.arange(nbins, dtype=float) / (nbins-1) D1 = np.zeros(nbins, dtype=float) pars = (Omega_m, Omega_de)", "Omega_m=None, Omega_de=None): def hubble(z, Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3 + Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def", "model if 'croom' in evol_model: bias_evol = self._bias_evol_croom(params, tracer_name) else: bias_evol = self._bias_evol_std(params,", "params : dict Computation parameters tracer_name : string Name of tracer Returns -------", "coordinate grid - r, mu, z scale_params : ScaleParameters ScaleParameters object tracer1 :", "# Compute the bias evolution using the right model if 'croom' in evol_model:", "= params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry'] lifetime = params['qso_rad_lifetime'] decrease = params['qso_rad_decrease'] # Compute", "list list with configs of broadband terms \"\"\" self.bb_terms = {} self.bb_terms['pre-add'] =", "utils class CorrelationFunction: \"\"\"Correlation function computation and handling. # ! 
Slow operations should", "radius coords of Xi mu : ND array Array of mu = rp/r", "Parameters ---------- r : ND array Array of radius coords of Xi mu", "Parameters ---------- params : dict Computation parameters pos_type : string String with position", "types) or (types[0] == types[1]): raise ValueError('You asked for relativistic effects or standard", "self._config: self.relativistic_flag = self._config.getboolean('relativistic correction') self.asymmetry_flag = False if 'standard asymmetry' in self._config:", "pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from relativistic effects (Bonvin et al.", "= np.sqrt(rp**2 + rt**2) mu_shift = rp / r_shift # Get the QSO", "self.relativistic_flag: xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) # Add standard asymmetry if self.asymmetry_flag: xi", "params : dict Computation parameters Returns ------- 1D Array Output correlation function \"\"\"", "parameters of this term name = 'BB-{}-{} {} {} {}'.format(config['cf_name'], index, config['type'], config['pre'],", "dict Computation parameters Returns ------- 1D Xi QSO radiation model \"\"\" assert 'QSO'", "def compute_growth(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute growth factor. Implements eq. 7.77 from", "numpy as np from scipy.integrate import quad from scipy.interpolate import interp1d from .", "Check for delta rp delta_rp = 0. if self._delta_rp_name is not None: delta_rp", "for the sky residuals. Parameters ---------- bb_term : dict broadband term config params", "evolution for the correlation function. 
Parameters ---------- params : dict Computation parameters Returns", "args=pars)[0] D1 = interp1d(z, D1) growth = D1(z_grid) / D1(z_fid) return growth**2 def", "i, j)]) bb_params = np.array(bb_params).reshape(-1, r_max - r_min + 1) corr = (bb_params[:,", "function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params) return xi_rel def compute_xi_asymmetry(self, pk, PktoXi_obj,", "the name for the parameters of this term name = 'BB-{}-{}-{}'.format(config['cf_name'], index +", "if corr is still None if corr is None: if 'mul' in pos_type:", "self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return xi def compute_core(self, pk, PktoXi_obj, params):", "corr *= 1 + self.broadband_sky(bb_term, params) else: corr += self.broadband_sky(bb_term, params) # Give", "for i in range(nbins): a = 1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a,", "(rp >= 0.) & (rp < bb_term['bin_size_rp']) corr[~w] = 0. return corr def", "fiducial, coords_grid, scale_params, tracer1, tracer2, bb_config=None, metal_corr=False): \"\"\" Parameters ---------- config : ConfigParser", "np.exp(-r_shift * ((1 + mu_shift) / lifetime + 1 / decrease)) return xi_rad", "config['func']) # Create the broadband term dictionary bb = {} bb['name'] = name", "Computation parameters Returns ------- 1D Array Output correlation function \"\"\" # Check for", "j in r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'], i, j)]) bb_params = np.array(bb_params).reshape(-1, r_max -", "= 'BB-{}-{} {} {} {}'.format(config['cf_name'], index, config['type'], config['pre'], config['rp_rt']) # Create the broadband", "self._Omega_m = fiducial.get('Omega_m', None) self._Omega_de = fiducial.get('Omega_de', None) if not config.getboolean('old_growth_func', False): self.xi_growth", "bb_term['r_config'] mu_min, mu_max, dmu = bb_term['mu_config'] r1_powers = np.arange(r_min, r_max + 1, dr)", "the broadband and check # if we need to add or multiply 
if", "rp (Only for the cross) self._delta_rp_name = None if tracer1['type'] == 'discrete' and", "evol_model: bias_evol = self._bias_evol_croom(params, tracer_name) else: bias_evol = self._bias_evol_std(params, tracer_name) return bias_evol def", "'post-add' Returns ------- 1d Array Output broadband \"\"\" assert pos_type in ['pre-mul', 'pre-add',", "= 0. return corr def broadband(self, bb_term, params): \"\"\"Compute broadband term. Calculates a", "+ delta_rp rt = r[mask] * np.sqrt(1 - mu[mask]**2) rescaled_rp = ap *", "*= self.compute_bias_evol(params) # Add growth xi *= self.xi_growth # Add QSO radiation modeling", "not ('QSO' in names and 'LYA' in names): raise ValueError('You asked for QSO", "Linear isotropic power spectrum PktoXi_obj : vega.PktoXi An instance of the transform object", "operations should be kept in init as that is only called once #", "assert self._tracer1['type'] != self._tracer2['type'] # Get rescaled Xi coordinates delta_rp = params.get(self._delta_rp_name, 0.)", "xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return xi def compute_core(self, pk, PktoXi_obj, params): \"\"\"Compute", "and mu grids delta_rp = params.get(self._delta_rp_name, 0.) rp = self._r * self._mu +", "evolution for tracer \"\"\" p0 = params['alpha_{}'.format(tracer_name)] bias_z = self._rel_z_evol**p0 return bias_z def", "---------- pk : ND Array Input power spectrum PktoXi_obj : vega.PktoXi An instance", "= name bb['func'] = config['func'] bb['bin_size_rp'] = config['bin_size_rp'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb)", "scipy.integrate import quad from scipy.interpolate import interp1d from . import utils class CorrelationFunction:", "the shifted r and mu grids delta_rp = params.get(self._delta_rp_name, 0.) 
rp = self._r", "float Alpha transverse delta_rp : float, optional Delta radius_parallel - nuisance correction for", "elif tracer2['type'] == 'discrete' and tracer1['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer2['name']", ">= 0.) & (rp < bb_term['bin_size_rp']) corr[~w] = 0. return corr def broadband(self,", "self._Omega_m if Omega_de is None: Omega_de = self._Omega_de # Check if we have", "------- ND Array Bias evolution for tracer \"\"\" handle_name = 'z evol {}'.format(tracer_name)", "tracer 2 bb_config : list, optional list with configs of broadband terms, by", "z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): def hubble(z, Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3 + Omega_de +", "\"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] # Get rescaled", "= None # Loop over the right pos/type configuration for bb_term in self.bb_terms[pos_type]:", "self.broadband(bb_term, params) if 'mul' in pos_type: corr = 1 + corr elif 'mul'", "compute_core(self, pk, PktoXi_obj, params): \"\"\"Compute the core of the correlation function. 
This does", "1/(1+z[i]) D1[i] = 5/2.*Omega_m*hubble(z[i], *pars)*quad(dD1, 0, a, args=pars)[0] D1 = interp1d(z, D1) growth", "bb_config=None, metal_corr=False): \"\"\" Parameters ---------- config : ConfigParser model section of config file", "self._r * np.sqrt(1 - self._mu**2) r_shift = np.sqrt(rp**2 + rt**2) mu_shift = rp", "xi += self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) # Add standard asymmetry if self.asymmetry_flag: xi +=", "dmu) bb_params = [] for i in r1_powers: for j in r2_powers: bb_params.append(params['{}", "configs of broadband terms \"\"\" self.bb_terms = {} self.bb_terms['pre-add'] = [] self.bb_terms['post-add'] =", "corr *= 1 + self.broadband(bb_term, params) else: corr += self.broadband(bb_term, params) else: #", "Returns ------- ND Array Bias evolution for tracer \"\"\" p0 = params['alpha_{}'.format(tracer_name)] bias_z", "section of config file fiducial : dict fiducial config coords_grid : dict Dictionary", "pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution from standard asymmetry (Bonvin et al.", "+= self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) # Add standard asymmetry if self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin,", "self.radiation_flag = self._config.getboolean('radiation effects') if self.radiation_flag: names = [self._tracer1['name'], self._tracer2['name']] if not ('QSO'", "function for input P(k). Parameters ---------- pk : ND Array Input power spectrum", "coords_grid['mu'] self._z = coords_grid['z'] self._multipole = config.getint('single_multipole', -1) self._tracer1 = tracer1 self._tracer2 =", "fiducial redshift growth /= utils.growth_function(z_fid, Omega_m, Omega_de) return growth**2 def compute_growth_old(self, z_grid=None, z_fid=None,", "None: if 'mul' in pos_type: corr = 1. else: corr = 0. return", "0 and zmax, then interpolate nbins = 100 zmax = 5. z =", "et al. 2014). 
Parameters ---------- pk : ND Array Input power spectrum PktoXi_obj", "relativistic effects (Bonvin et al. 2014). Parameters ---------- pk : ND Array Input", "Omega_de) # Scale to the fiducial redshift growth /= utils.growth_function(z_fid, Omega_m, Omega_de) return", "rescaled Xi coordinates delta_rp = params.get(self._delta_rp_name, 0.) ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r,", "names): raise ValueError('You asked for QSO radiation effects, but it' ' can only", "Omega_m is None: Omega_m = self._Omega_m if Omega_de is None: Omega_de = self._Omega_de", "def broadband(self, bb_term, params): \"\"\"Compute broadband term. Calculates a power-law broadband in r", "rp rescaled_rt = at * rt rescaled_r = np.zeros(len(r)) rescaled_mu = np.zeros(len(mu)) rescaled_r[mask]", "len(normal_broadbands), config['func']) # Create the broadband term dictionary bb = {} bb['name'] =", "in bb_config if el['func'] != 'broadband_sky'] for index, config in enumerate(normal_broadbands): # Create", "(Bonvin et al. 2014). Parameters ---------- pk : ND Array Input power spectrum", "types = [self._tracer1['type'], self._tracer2['type']] if ('continuous' not in types) or (types[0] == types[1]):", "= tracer2 self._z_eff = fiducial['z_eff'] self._rel_z_evol = (1. + self._z) / (1 +", "ND Array Bias evolution for tracer \"\"\" assert tracer_name == \"QSO\" p0 =", "and zmax, then interpolate nbins = 100 zmax = 5. z = zmax", "if z_fid is None: z_fid = self._z_fid if Omega_m is None: Omega_m =", "Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m, Omega_de): z = 1/a-1 return 1./(a*hubble(z, Omega_m,", "Output broadband \"\"\" r1 = self._r / 100. r2 = self._mu if bb_term['rp_rt']", "/ (1 + self._z_eff) self._scale_params = scale_params self._metal_corr = metal_corr # Check if", "spectrum pk_lin : 1D Array Linear isotropic power spectrum PktoXi_obj : vega.PktoXi An", "rp,rt for the sky residuals. 
Parameters ---------- bb_term : dict broadband term config", "{}'.format(tracer_name) if handle_name in self._config: evol_model = self._config.get(handle_name, 'standard') else: evol_model = self._config.get('z", "sums the necessary multipoles and rescales the coordinates Parameters ---------- pk : ND", "evol {}'.format(tracer_name) if handle_name in self._config: evol_model = self._config.get(handle_name, 'standard') else: evol_model =", "+ rescaled_rt**2) rescaled_mu[mask] = rescaled_rp / rescaled_r[mask] return rescaled_r, rescaled_mu def compute_bias_evol(self, params):", "work for the cross') def compute(self, pk, pk_lin, PktoXi_obj, params): \"\"\"Compute correlation function", "< bb_term['bin_size_rp']) corr[~w] = 0. return corr def broadband(self, bb_term, params): \"\"\"Compute broadband", "from scipy.integrate import quad from scipy.interpolate import interp1d from . import utils class", "rp = self._r * self._mu rt = self._r * np.sqrt(1 - self._mu**2) scale", "terms for one position (pre-distortion/post-distortion) and one type (multiplicative/additive). 
Parameters ---------- params :", "self._Omega_m, self._Omega_de) # Initialize the broadband self.has_bb = False if bb_config is not", "self._config: evol_model = self._config.get(handle_name, 'standard') else: evol_model = self._config.get('z evol', 'standard') # Compute", "tracer2['type'] != 'discrete': self._delta_rp_name = 'drp_' + tracer1['name'] elif tracer2['type'] == 'discrete' and", "params : dict Computation parameters Returns ------- 1D Array Output xi asymmetry \"\"\"", "name bb['func'] = config['func'] bb['rp_rt'] = config['rp_rt'] bb['r_config'] = config['r_config'] bb['mu_config'] = config['mu_config']", "asymmetry \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert self._tracer1['type'] != self._tracer2['type'] # Get", "self._tracer2['name']] assert self._tracer1['name'] != self._tracer2['name'] # Compute the shifted r and mu grids", "only work for the cross') def compute(self, pk, pk_lin, PktoXi_obj, params): \"\"\"Compute correlation", "+ z_grid) return growth**2 # Compute the growth at each redshift on the", "assert self._tracer1['name'] != self._tracer2['name'] # Compute the shifted r and mu grids delta_rp", "each redshift on the grid growth = utils.growth_function(z_grid, Omega_m, Omega_de) # Scale to", "correlation function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params) return xi_rel def compute_xi_asymmetry(self, pk,", "self._delta_rp_name is not None: delta_rp = params.get(self._delta_rp_name, 0.) 
# Get rescaled Xi coordinates", "mu_shift) / lifetime + 1 / decrease)) return xi_rad def compute_xi_relativistic(self, pk, PktoXi_obj,", "= fiducial.get('Omega_de', None) if not config.getboolean('old_growth_func', False): self.xi_growth = self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de)", "Output correlation function \"\"\" # Compute the core xi = self.compute_core(pk, PktoXi_obj, params)", "None: self._init_broadband(bb_config) self.has_bb = True # Check for QSO radiation modeling and check", "- mu[mask]**2) rescaled_rp = ap * rp rescaled_rt = at * rt rescaled_r", "\"QSO\" p0 = params[\"croom_par0\"] p1 = params[\"croom_par1\"] bias_z = (p0 + p1*(1. +", "parallel at : float Alpha transverse delta_rp : float, optional Delta radius_parallel -", "Xi coordinates using ap/at. Parameters ---------- r : ND array Array of radius", "for the QSO auto as well? self.radiation_flag = False if 'radiation effects' in", "def compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): def hubble(z, Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3 +", "= bb_term['mu_config'] r1_powers = np.arange(r_min, r_max + 1, dr) r2_powers = np.arange(mu_min, mu_max", "sky residuals. Parameters ---------- bb_term : dict broadband term config params : dict", "= 'drp_' + tracer2['name'] # Precompute growth self._z_fid = fiducial['z_fiducial'] self._Omega_m = fiducial.get('Omega_m',", "def compute_core(self, pk, PktoXi_obj, params): \"\"\"Compute the core of the correlation function. This", "compute_growth(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute growth factor. Implements eq. 7.77 from <NAME>'s", "_bias_evol_std(self, params, tracer_name): \"\"\"Bias evolution standard model. Parameters ---------- params : dict Computation", "for discrete tracers, by default 0. 
Returns ------- ND Array Rescaled radii ND", "/ decrease)) return xi_rad def compute_xi_relativistic(self, pk, PktoXi_obj, params): \"\"\"Calculate the cross-correlation contribution", "the cross') def compute(self, pk, pk_lin, PktoXi_obj, params): \"\"\"Compute correlation function for input", "= self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de) else: self.xi_growth = self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de) #", "model. Parameters ---------- params : dict Computation parameters tracer_name : string Tracer name", "r_max + 1, dr) r2_powers = np.arange(mu_min, mu_max + 1, dmu) bb_params =", "------- 1d Array Output broadband \"\"\" r1 = self._r / 100. r2 =", "Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3 + Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m, Omega_de): z", "1, 2)) return corr def compute_qso_radiation(self, params): \"\"\"Model the contribution of QSO radiation", "mu = rp/r coords of Xi ap : float Alpha parallel at :", "elif 'mul' in pos_type: corr *= 1 + self.broadband(bb_term, params) else: corr +=", "from standard asymmetry (Bonvin et al. 2014). Parameters ---------- pk : ND Array", "self._config = config self._r = coords_grid['r'] self._mu = coords_grid['mu'] self._z = coords_grid['z'] self._multipole", "asymmetry if self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return xi def compute_core(self, pk,", "cross-correlation contribution from relativistic effects (Bonvin et al. 2014). Parameters ---------- pk :", "params, tracer_name): \"\"\"Bias evolution Croom model for QSO, see Croom et al. 2005.", "the broadband terms for one position (pre-distortion/post-distortion) and one type (multiplicative/additive). 
Parameters ----------", "[] self.bb_terms['post-mul'] = [] # First pick up the normal broadband terms normal_broadbands", "# Compute the correlation function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params) return xi_rel", "tracer_name) else: bias_evol = self._bias_evol_std(params, tracer_name) return bias_evol def _bias_evol_std(self, params, tracer_name): \"\"\"Bias", "in self._config: self.relativistic_flag = self._config.getboolean('relativistic correction') self.asymmetry_flag = False if 'standard asymmetry' in", "self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de) else: self.xi_growth = self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de) # Initialize", "sky_broadbands = [el for el in bb_config if el['func'] == 'broadband_sky'] for index,", "it' ' can only be applied to the cross (QSOxLya)') # Check for", "effect) Parameters ---------- params : dict Computation parameters Returns ------- 1D Xi QSO", ": dict Computation parameters Returns ------- 1D Array Output xi relativistic \"\"\" assert", "index, config in enumerate(normal_broadbands): # Create the name for the parameters of this", "and rescales the coordinates Parameters ---------- pk : ND Array Input power spectrum", "model for QSO, see Croom et al. 2005. 
Parameters ---------- params : dict", "# Create the name for the parameters of this term name = 'BB-{}-{}", "_bias_evol_croom(self, params, tracer_name): \"\"\"Bias evolution Croom model for QSO, see Croom et al.", "Check if it's sky or normal broadband if bb_term['func'] != 'broadband_sky': # Initialize", "+ Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m, Omega_de): z = 1/a-1 return 1./(a*hubble(z,", "ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp)", "self._init_broadband(bb_config) self.has_bb = True # Check for QSO radiation modeling and check if", "import interp1d from . import utils class CorrelationFunction: \"\"\"Correlation function computation and handling.", "self.xi_growth = self.compute_growth(self._z, self._z_fid, self._Omega_m, self._Omega_de) else: self.xi_growth = self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de)", "0. Returns ------- ND Array Rescaled radii ND Array Rescaled mu \"\"\" mask", "'discrete': self._delta_rp_name = 'drp_' + tracer1['name'] elif tracer2['type'] == 'discrete' and tracer1['type'] !=", "* np.sqrt(1 - self._mu**2) scale = params[bb_term['name'] + '-scale-sky'] sigma = params[bb_term['name'] +", "the Hankel transform of the input P(k), sums the necessary multipoles and rescales", "= {} self.bb_terms['pre-add'] = [] self.bb_terms['post-add'] = [] self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul'] =", "self.relativistic_flag = False if 'relativistic correction' in self._config: self.relativistic_flag = self._config.getboolean('relativistic correction') self.asymmetry_flag", "= params[bb_term['name'] + '-sigma-sky'] corr = scale / (sigma * np.sqrt(2. 
* np.pi))", "'radiation effects' in self._config: self.radiation_flag = self._config.getboolean('radiation effects') if self.radiation_flag: names = [self._tracer1['name'],", "dict Computation parameters Returns ------- 1D Array Output correlation function \"\"\" # Compute", "if it's sky or normal broadband if bb_term['func'] != 'broadband_sky': # Initialize the", "ND Array Rescaled radii ND Array Rescaled mu \"\"\" mask = r !=", ": dict broadband term config params : dict Computation parameters Returns ------- 1d", "r1 = self._r / 100. r2 = self._mu if bb_term['rp_rt'] == 'rp,rt': r1", "# Add bias evolution xi *= self.compute_bias_evol(params) # Add growth xi *= self.xi_growth", "array Array of radius coords of Xi mu : ND array Array of", "Compute the shifted r and mu grids delta_rp = params.get(self._delta_rp_name, 0.) rp =", "in self.bb_terms[pos_type]: # Check if it's sky or normal broadband if bb_term['func'] !=", "modeling and check if it is QSOxLYA # Does this work for the", "self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) # Next pick up the sky broadban terms", "r1_powers: for j in r2_powers: bb_params.append(params['{} ({},{})'.format( bb_term['name'], i, j)]) bb_params = np.array(bb_params).reshape(-1,", "delta_rp : float, optional Delta radius_parallel - nuisance correction for wrong redshift, used", "{} self.bb_terms['pre-add'] = [] self.bb_terms['post-add'] = [] self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul'] = []", "Returns ------- 1D Array Output xi relativistic \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']]", "for the correlation function. 
Parameters ---------- params : dict Computation parameters Returns -------", "but they only work for the cross') def compute(self, pk, pk_lin, PktoXi_obj, params):", "params) else: # Initialize the broadband and check # if we need to", "self._delta_rp_name = None if tracer1['type'] == 'discrete' and tracer2['type'] != 'discrete': self._delta_rp_name =", "if el['func'] == 'broadband_sky'] for index, config in enumerate(sky_broadbands): assert config['rp_rt'] == 'rp,rt'", "Rescaled radii ND Array Rescaled mu \"\"\" mask = r != 0 rp", "of tracer 1 tracer2 : dict Config of tracer 2 bb_config : list,", "r : ND array Array of radius coords of Xi mu : ND", "Compute the bias evolution using the right model if 'croom' in evol_model: bias_evol", "1 + self.broadband(bb_term, params) else: corr += self.broadband(bb_term, params) else: # Initialize the", "def compute(self, pk, pk_lin, PktoXi_obj, params): \"\"\"Compute correlation function for input P(k). Parameters", "compute(self, pk, pk_lin, PktoXi_obj, params): \"\"\"Compute correlation function for input P(k). Parameters ----------", "= params[bb_term['name'] + '-scale-sky'] sigma = params[bb_term['name'] + '-sigma-sky'] corr = scale /", "* rt rescaled_r = np.zeros(len(r)) rescaled_mu = np.zeros(len(mu)) rescaled_r[mask] = np.sqrt(rescaled_rp**2 + rescaled_rt**2)", "Create the broadband term dictionary bb = {} bb['name'] = name bb['func'] =", "self.asymmetry_flag = self._config.getboolean('standard asymmetry') if self.relativistic_flag or self.asymmetry_flag: types = [self._tracer1['type'], self._tracer2['type']] if", "self._mu, ap, at, delta_rp) # Compute the correlation function xi_asy = PktoXi_obj.pk_to_xi_asymmetry(rescaled_r, rescaled_mu,", "tracer bias evolution. Parameters ---------- params : dict Computation parameters tracer_name : string", "the core of the correlation function. 
This does the Hankel transform of the", "= self._config.getboolean('radiation effects') if self.radiation_flag: names = [self._tracer1['name'], self._tracer2['name']] if not ('QSO' in", "params[\"croom_par0\"] p1 = params[\"croom_par1\"] bias_z = (p0 + p1*(1. + self._z)**2) / (p0", "config['func'] bb['rp_rt'] = config['rp_rt'] bb['r_config'] = config['r_config'] bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre'] + \"-\"", "\"\"\" assert pos_type in ['pre-mul', 'pre-add', 'post-mul', 'post-add'] corr = None # Loop", "redshift on the grid growth = utils.growth_function(z_grid, Omega_m, Omega_de) # Scale to the", "pk_lin : 1D Array Linear isotropic power spectrum PktoXi_obj : vega.PktoXi An instance", "tracer1['name'] elif tracer2['type'] == 'discrete' and tracer1['type'] != 'discrete': self._delta_rp_name = 'drp_' +", "rescaled Xi coordinates ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu,", "return bias_z def _bias_evol_croom(self, params, tracer_name): \"\"\"Bias evolution Croom model for QSO, see", "+ len(normal_broadbands), config['func']) # Create the broadband term dictionary bb = {} bb['name']", "params['qso_rad_decrease'] # Compute the QSO radiation model xi_rad = strength / (r_shift**2) *", "/ sigma)**2) w = (rp >= 0.) 
& (rp < bb_term['bin_size_rp']) corr[~w] =", "ap, at, delta_rp) # Compute correlation function xi = PktoXi_obj.compute(rescaled_r, rescaled_mu, pk, self._multipole)", "scale_params self._metal_corr = metal_corr # Check if we need delta rp (Only for", "config['r_config'] bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre'] + \"-\" + config['type']].append(bb) # Next pick up", "instance of the transform object used to turn Pk into Xi params :", "* self._mu + delta_rp rt = self._r * np.sqrt(1 - self._mu**2) r_shift =", "r2**r2_powers[None, :, None]).sum(axis=(0, 1, 2)) return corr def compute_qso_radiation(self, params): \"\"\"Model the contribution", "need delta rp (Only for the cross) self._delta_rp_name = None if tracer1['type'] ==", "np.array(bb_params).reshape(-1, r_max - r_min + 1) corr = (bb_params[:, :, None, None] *", "using ap/at. Parameters ---------- r : ND array Array of radius coords of", "elif 'mul' in pos_type: corr *= 1 + self.broadband_sky(bb_term, params) else: corr +=", "100. * np.sqrt(1 - self._mu**2) r_min, r_max, dr = bb_term['r_config'] mu_min, mu_max, dmu", "Omega_m, Omega_de) return growth**2 def compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): def hubble(z, Omega_m,", "add or multiply if corr is None: corr = self.broadband(bb_term, params) if 'mul'", "the necessary multipoles and rescales the coordinates Parameters ---------- pk : ND Array", "the QSO radiation model parameters strength = params['qso_rad_strength'] asymmetry = params['qso_rad_asymmetry'] lifetime =", "of the input P(k), sums the necessary multipoles and rescales the coordinates Parameters", "Array Output broadband \"\"\" r1 = self._r / 100. r2 = self._mu if", "pick up the normal broadband terms normal_broadbands = [el for el in bb_config", "Extensions should have their separate method of the form 'compute_extension' that can be", "\"\"\"Correlation function computation and handling. # ! 
Slow operations should be kept in", "ap * rp rescaled_rt = at * rt rescaled_r = np.zeros(len(r)) rescaled_mu =", "Omega_de=None): def hubble(z, Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3 + Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a,", "if corr is None: corr = self.broadband(bb_term, params) if 'mul' in pos_type: corr", "configs of broadband terms, by default None metal_corr : bool, optional Whether this", "or standard asymmetry,' ' but they only work for the cross') def compute(self,", "# Scale to the fiducial redshift growth /= utils.growth_function(z_fid, Omega_m, Omega_de) return growth**2", "0. return corr def broadband_sky(self, bb_term, params): \"\"\"Compute sky broadband term. Calculates a", "mu[mask]**2) rescaled_rp = ap * rp rescaled_rt = at * rt rescaled_r =", "return corr def broadband(self, bb_term, params): \"\"\"Compute broadband term. Calculates a power-law broadband", "= False if 'standard asymmetry' in self._config: self.asymmetry_flag = self._config.getboolean('standard asymmetry') if self.relativistic_flag", "of config file fiducial : dict fiducial config coords_grid : dict Dictionary with", "PktoXi_obj : vega.PktoXi An instance of the transform object used to turn Pk", "= params.get(self._delta_rp_name, 0.) # Get rescaled Xi coordinates ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr)", "params : dict Computation parameters Returns ------- 1d Array Output broadband \"\"\" rp", "CorrelationFunction: \"\"\"Correlation function computation and handling. # ! Slow operations should be kept", "bias_z def compute_growth(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): \"\"\"Compute growth factor. Implements eq. 
7.77", "Array Input power spectrum pk_lin : 1D Array Linear isotropic power spectrum PktoXi_obj", "is not None: self._init_broadband(bb_config) self.has_bb = True # Check for QSO radiation modeling", "None) self._Omega_de = fiducial.get('Omega_de', None) if not config.getboolean('old_growth_func', False): self.xi_growth = self.compute_growth(self._z, self._z_fid,", "or self.asymmetry_flag: types = [self._tracer1['type'], self._tracer2['type']] if ('continuous' not in types) or (types[0]", "# Loop over the right pos/type configuration for bb_term in self.bb_terms[pos_type]: # Check", "growth xi *= self.xi_growth # Add QSO radiation modeling for cross if self.radiation_flag", "# Get rescaled Xi coordinates delta_rp = params.get(self._delta_rp_name, 0.) ap, at = self._scale_params.get_ap_at(params,", "= self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute the correlation function xi_rel =", "/ 100. r2 = self._mu if bb_term['rp_rt'] == 'rp,rt': r1 = self._r /", "= params[\"croom_par1\"] bias_z = (p0 + p1*(1. + self._z)**2) / (p0 + p1", "------- 1D Array Output xi asymmetry \"\"\" assert 'continuous' in [self._tracer1['type'], self._tracer2['type']] assert", "list, optional list with configs of broadband terms, by default None metal_corr :", "scale_params, tracer1, tracer2, bb_config=None, metal_corr=False): \"\"\" Parameters ---------- config : ConfigParser model section", "zmax = 5. z = zmax * np.arange(nbins, dtype=float) / (nbins-1) D1 =", "First pick up the normal broadband terms normal_broadbands = [el for el in", "be fast Extensions should have their separate method of the form 'compute_extension' that", "config : ConfigParser model section of config file fiducial : dict fiducial config", "Calculates a Gaussian broadband in rp,rt for the sky residuals. Parameters ---------- bb_term", "= self._r / 100. * self._mu r2 = self._r / 100. 
* np.sqrt(1", "= self._r * np.sqrt(1 - self._mu**2) r_shift = np.sqrt(rp**2 + rt**2) mu_shift =", "/ (r_shift**2) * (1 - asymmetry * (1 - mu_shift**2)) xi_rad *= np.exp(-r_shift", "\"\"\" Parameters ---------- config : ConfigParser model section of config file fiducial :", "Get rescaled Xi coordinates ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r,", "self._Omega_de) else: self.xi_growth = self.compute_growth_old(self._z, self._z_fid, self._Omega_m, self._Omega_de) # Initialize the broadband self.has_bb", "corr = self.broadband_sky(bb_term, params) if 'mul' in pos_type: corr = 1 + corr", "(1 + self._z_eff) self._scale_params = scale_params self._metal_corr = metal_corr # Check if we", "dD1(a, Omega_m, Omega_de): z = 1/a-1 return 1./(a*hubble(z, Omega_m, Omega_de))**3 # Calculate D1", "else: corr += self.broadband(bb_term, params) else: # Initialize the broadband and check #", "= params.get(self._delta_rp_name, 0.) ap, at = self._scale_params.get_ap_at(params, metal_corr=self._metal_corr) rescaled_r, rescaled_mu = self._rescale_coords(self._r, self._mu,", "+ \"-\" + config['type']].append(bb) # Next pick up the sky broadban terms sky_broadbands", "np.sqrt(rp**2 + rt**2) mu_shift = rp / r_shift # Get the QSO radiation", "right model if 'croom' in evol_model: bias_evol = self._bias_evol_croom(params, tracer_name) else: bias_evol =", "if we have dark energy if Omega_de is None: growth = (1 +", "the cross-correlation contribution from standard asymmetry (Bonvin et al. 2014). 
Parameters ---------- pk", "---------- config : ConfigParser model section of config file fiducial : dict fiducial", "effects') if self.radiation_flag: names = [self._tracer1['name'], self._tracer2['name']] if not ('QSO' in names and", "self.bb_terms[pos_type]: # Check if it's sky or normal broadband if bb_term['func'] != 'broadband_sky':", "_rescale_coords(r, mu, ap, at, delta_rp=0.): \"\"\"Rescale Xi coordinates using ap/at. Parameters ---------- r", "with coordinate grid - r, mu, z scale_params : ScaleParameters ScaleParameters object tracer1", "the name for the parameters of this term name = 'BB-{}-{} {} {}", "metal correlation, by default False \"\"\" self._config = config self._r = coords_grid['r'] self._mu", "\"\"\" # Compute the core xi = self.compute_core(pk, PktoXi_obj, params) # Add bias", "corr is still None if corr is None: if 'mul' in pos_type: corr", "the defaults if z_grid is None: z_grid = self._z if z_fid is None:", "params : dict Computation parameters pos_type : string String with position and type,", "it is QSOxLYA # Does this work for the QSO auto as well?", "not params['peak']: xi += self.compute_qso_radiation(params) # Add relativistic effects if self.relativistic_flag: xi +=", "rescaled_mu = self._rescale_coords(self._r, self._mu, ap, at, delta_rp) # Compute correlation function xi =", "!= self._tracer2['type'] # Get rescaled Xi coordinates delta_rp = params.get(self._delta_rp_name, 0.) 
ap, at", "in enumerate(sky_broadbands): assert config['rp_rt'] == 'rp,rt' # Create the name for the parameters", "float, optional Delta radius_parallel - nuisance correction for wrong redshift, used for discrete", "!= 'broadband_sky'] for index, config in enumerate(normal_broadbands): # Create the name for the", "np.sqrt(Omega_m*(1+z)**3 + Omega_de + (1-Omega_m-Omega_de)*(1+z)**2) def dD1(a, Omega_m, Omega_de): z = 1/a-1 return", "input P(k), sums the necessary multipoles and rescales the coordinates Parameters ---------- pk", "Compute the core xi = self.compute_core(pk, PktoXi_obj, params) # Add bias evolution xi", "params) # Give defaults if corr is still None if corr is None:", "# Create the name for the parameters of this term name = 'BB-{}-{}-{}'.format(config['cf_name'],", "power-law broadband in r and mu or rp,rt. Parameters ---------- bb_term : dict", "have their separate method of the form 'compute_extension' that can be called from", "et al. 2005. Parameters ---------- params : dict Computation parameters tracer_name : string", "on the grid growth = utils.growth_function(z_grid, Omega_m, Omega_de) # Scale to the fiducial", "of the correlation function. This does the Hankel transform of the input P(k),", "self._z)**2) / (p0 + p1 * (1 + self._z_eff)**2) return bias_z def compute_growth(self,", "Add QSO radiation modeling for cross if self.radiation_flag and not params['peak']: xi +=", "= [] self.bb_terms['pre-mul'] = [] self.bb_terms['post-mul'] = [] # First pick up the", "bb['func'] = config['func'] bb['rp_rt'] = config['rp_rt'] bb['r_config'] = config['r_config'] bb['mu_config'] = config['mu_config'] self.bb_terms[config['pre']", "handling. # ! 
Slow operations should be kept in init as that is", "dict Computation parameters Returns ------- 1D Array Output xi relativistic \"\"\" assert 'continuous'", "0 rp = r[mask] * mu[mask] + delta_rp rt = r[mask] * np.sqrt(1", "ND Array Bias evolution for tracer \"\"\" handle_name = 'z evol {}'.format(tracer_name) if", "self.compute_xi_asymmetry(pk_lin, PktoXi_obj, params) return xi def compute_core(self, pk, PktoXi_obj, params): \"\"\"Compute the core", "into Xi params : dict Computation parameters Returns ------- 1D Array Output correlation", "self.compute_xi_relativistic(pk_lin, PktoXi_obj, params) # Add standard asymmetry if self.asymmetry_flag: xi += self.compute_xi_asymmetry(pk_lin, PktoXi_obj,", "a metal correlation, by default False \"\"\" self._config = config self._r = coords_grid['r']", "function computation and handling. # ! Slow operations should be kept in init", ": ScaleParameters ScaleParameters object tracer1 : dict Config of tracer 1 tracer2 :", "bb_params.append(params['{} ({},{})'.format( bb_term['name'], i, j)]) bb_params = np.array(bb_params).reshape(-1, r_max - r_min + 1)", "function \"\"\" # Check for delta rp delta_rp = 0. if self._delta_rp_name is", "D1 in 100 values of z between 0 and zmax, then interpolate nbins", "growth**2 def compute_growth_old(self, z_grid=None, z_fid=None, Omega_m=None, Omega_de=None): def hubble(z, Omega_m, Omega_de): return np.sqrt(Omega_m*(1+z)**3", "Returns ------- 1D Array Output correlation function \"\"\" # Compute the core xi", "- mu_shift**2)) xi_rad *= np.exp(-r_shift * ((1 + mu_shift) / lifetime + 1", "p0 = params[\"croom_par0\"] p1 = params[\"croom_par1\"] bias_z = (p0 + p1*(1. 
+ self._z)**2)", "broadband and check # if we need to add or multiply if corr", "self.radiation_flag = False if 'radiation effects' in self._config: self.radiation_flag = self._config.getboolean('radiation effects') if", "delta_rp) # Compute the correlation function xi_rel = PktoXi_obj.pk_to_xi_relativistic(rescaled_r, rescaled_mu, pk, params) return", "params): \"\"\"Calculate the cross-correlation contribution from standard asymmetry (Bonvin et al. 2014). Parameters", "need to add or multiply if corr is None: corr = self.broadband_sky(bb_term, params)", "= 100 zmax = 5. z = zmax * np.arange(nbins, dtype=float) / (nbins-1)", "cross') def compute(self, pk, pk_lin, PktoXi_obj, params): \"\"\"Compute correlation function for input P(k).", "evolution. Parameters ---------- params : dict Computation parameters tracer_name : string Name of", "None]).sum(axis=(0, 1, 2)) return corr def compute_qso_radiation(self, params): \"\"\"Model the contribution of QSO" ]
[ "forked.kill('USR2') for i in xrange(10): if forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(),", "TS_ASSERT(forked.poll() is None) forked.kill('USR2') for i in xrange(10): if forked.poll() is None: time.sleep(1)", "\"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)[0], 5) forked = host.it.seed.forkCode( \"import", "\"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill() for", "is None) TS_ASSERT(forked.poll() is None) forked.kill('TERM') for i in xrange(10): if forked.poll() is", "None) forked.kill() for i in xrange(10): if forked.poll() is None: time.sleep(1) else: break", "break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is", "None def signalHandler(sigNum, _): global signalReceived signalReceived = sigNum signal.signal(signal.SIGUSR2, signalHandler) while not", "is None) TS_ASSERT(forked.poll() is None) forked.kill() for i in xrange(10): if forked.poll() is", "None) forked.kill('TERM') for i in xrange(10): if forked.poll() is None: time.sleep(1) else: break", "= \"\"\" import signal import time signalReceived = None def signalHandler(sigNum, _): global", "forked.kill('TERM') for i in xrange(10): if forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(),", "i in xrange(10): if forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked", "import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)[0], 5) forked = host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print", "TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE' in forked.output()) forked = host.it.seed.forkCode( \"import time\\nwhile", "1, second=2, 
takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)[0],", "LINE' in forked.output()) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is", "in forked.output()) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None)", "TS_ASSERT(forked.poll() is None) forked.kill('TERM') for i in xrange(10): if forked.poll() is None: time.sleep(1)", "TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE' in forked.output()) forked", "global signalReceived signalReceived = sigNum signal.signal(signal.SIGUSR2, signalHandler) while not signalReceived: time.sleep(1) \"\"\" class", "break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None)", "takeSitePackages=True)[0], 5) forked = host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT LINE'\\n\" \"from example_seeds import", "if forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode( \"import", "time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('TERM') for i in xrange(10):", "is None) forked.kill('USR2') for i in xrange(10): if forked.poll() is None: time.sleep(1) else:", "= addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll())", "takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) 
forked.kill() for i in xrange(10): if", "5) forked = host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT LINE'\\n\" \"from example_seeds import addition\\nresult", "is None) forked.kill('TERM') for i in xrange(10): if forked.poll() is None: time.sleep(1) else:", "example_seeds import addition import time SIGNALLED_CALLABLE_CODE = \"\"\" import signal import time signalReceived", "forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is", "signal.signal(signal.SIGUSR2, signalHandler) while not signalReceived: time.sleep(1) \"\"\" class Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def", "in xrange(10): if forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked =", "from example_seeds import addition import time SIGNALLED_CALLABLE_CODE = \"\"\" import signal import time", "import time signalReceived = None def signalHandler(sigNum, _): global signalReceived signalReceived = sigNum", "TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE' in forked.output()) forked = host.it.seed.forkCode( \"import", "forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode( \"import time\\nwhile", "is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE' in forked.output()) forked =", "TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE' in forked.output()) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\",", "* from example_seeds import addition import time SIGNALLED_CALLABLE_CODE = \"\"\" import signal import", "None) TS_ASSERT(forked.poll() is None) forked.kill('USR2') for i in xrange(10): if forked.poll() is None:", "else: break 
TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll()", "TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE'", "is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll() is", "dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2, takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import", "time signalReceived = None def signalHandler(sigNum, _): global signalReceived signalReceived = sigNum signal.signal(signal.SIGUSR2,", "is None) forked.kill() for i in xrange(10): if forked.poll() is None: time.sleep(1) else:", "\"print 'OUTPUT LINE'\\n\" \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is", "= sigNum signal.signal(signal.SIGUSR2, signalHandler) while not signalReceived: time.sleep(1) \"\"\" class Test: HOSTS =", "signal import time signalReceived = None def signalHandler(sigNum, _): global signalReceived signalReceived =", "\"\"\" import signal import time signalReceived = None def signalHandler(sigNum, _): global signalReceived", "import signal import time signalReceived = None def signalHandler(sigNum, _): global signalReceived signalReceived", "None) TS_ASSERT(forked.poll() is None) forked.kill('TERM') for i in xrange(10): if forked.poll() is None:", "TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('USR2') for i in xrange(10): if forked.poll()", "TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) 
TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('USR2')", "time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill() for i in xrange(10):", "is None) TS_ASSERT(forked.poll() is None) forked.kill('USR2') for i in xrange(10): if forked.poll() is", "takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)[0], 5) forked", "SIGNALLED_CALLABLE_CODE = \"\"\" import signal import time signalReceived = None def signalHandler(sigNum, _):", "3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)[0], 5) forked =", "import addition import time SIGNALLED_CALLABLE_CODE = \"\"\" import signal import time signalReceived =", "5) TS_ASSERT('OUTPUT LINE' in forked.output()) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True)", "Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2, takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode(", "else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is", "takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('TERM') for i in xrange(10): if", "example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)[0], 5) forked = host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\"", "= host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('USR2') for i in", "second=2, takeSitePackages=True)[0], 3) 
TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)[0], 5)", "= None def signalHandler(sigNum, _): global signalReceived signalReceived = sigNum signal.signal(signal.SIGUSR2, signalHandler) while", "_): global signalReceived signalReceived = sigNum signal.signal(signal.SIGUSR2, signalHandler) while not signalReceived: time.sleep(1) \"\"\"", "'OUTPUT LINE'\\n\" \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None)", "if forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True)", "TS_ASSERT('OUTPUT LINE' in forked.output()) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll()", "import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll,", "= addition.addition(2, second=3)\", takeSitePackages=True)[0], 5) forked = host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT LINE'\\n\"", "is None) TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE' in", "signalHandler(sigNum, _): global signalReceived signalReceived = sigNum signal.signal(signal.SIGUSR2, signalHandler) while not signalReceived: time.sleep(1)", "None) TS_ASSERT(forked.poll() is None) forked.kill() for i in xrange(10): if forked.poll() is None:", "None) forked.kill('USR2') for i in xrange(10): if forked.poll() is None: time.sleep(1) else: break", "addition.addition(2, second=3)\", takeSitePackages=True)[0], 5) forked = host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT 
LINE'\\n\" \"from", "time\\ntime.sleep(3)\\n\" \"print 'OUTPUT LINE'\\n\" \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll()", "forked = host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT LINE'\\n\" \"from example_seeds import addition\\nresult =", "signalHandler) while not signalReceived: time.sleep(1) \"\"\" class Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def run(self):", "import time SIGNALLED_CALLABLE_CODE = \"\"\" import signal import time signalReceived = None def", "not signalReceived: time.sleep(1) \"\"\" class Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition,", "addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(),", "TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)[0], 5) forked = host.it.seed.forkCode(", "None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE' in forked.output()) forked = host.it.seed.forkCode(", "TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None)", "second=3)\", takeSitePackages=True)[0], 5) forked = host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT LINE'\\n\" \"from example_seeds", "forked.output()) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll()", "for i in xrange(10): if forked.poll() is None: time.sleep(1) else: break 
TS_ASSERT_EQUALS(forked.poll(), False)", "takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT", "import * from example_seeds import addition import time SIGNALLED_CALLABLE_CODE = \"\"\" import signal", "TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill() for i in xrange(10): if forked.poll()", "\"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT LINE'\\n\" \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)", "True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('TERM') for i in", "time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll()", "time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True)", "time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('TERM') for i", "host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT LINE'\\n\" \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\",", "TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE' in forked.output()) forked = host.it.seed.forkCode( \"import time\\nwhile True:", "addition import time SIGNALLED_CALLABLE_CODE = \"\"\" import signal import time signalReceived = None", "\"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is", "from strato.racktest.infra.suite import * from 
example_seeds import addition import time SIGNALLED_CALLABLE_CODE = \"\"\"", "host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill()", "signalReceived = sigNum signal.signal(signal.SIGUSR2, signalHandler) while not signalReceived: time.sleep(1) \"\"\" class Test: HOSTS", "def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2, takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import addition\\nresult", "False) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll()", "example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None)", "addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True)[0], 5) forked = host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT", "= host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None)", "forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll()", "class Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2, takeSitePackages=True)[0], 3)", "signalReceived = None def signalHandler(sigNum, _): global signalReceived signalReceived = sigNum signal.signal(signal.SIGUSR2, signalHandler)", "TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('TERM') for i in xrange(10): if forked.poll()", "forked.kill() for i in xrange(10): if forked.poll() is None: time.sleep(1) else: break 
TS_ASSERT_EQUALS(forked.poll(),", "time.sleep(1) \"\"\" class Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2,", "addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4)", "time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill() for i", "while not signalReceived: time.sleep(1) \"\"\" class Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable(", "second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5)", "signalReceived signalReceived = sigNum signal.signal(signal.SIGUSR2, signalHandler) while not signalReceived: time.sleep(1) \"\"\" class Test:", "sigNum signal.signal(signal.SIGUSR2, signalHandler) while not signalReceived: time.sleep(1) \"\"\" class Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\"))", "for i in xrange(10): if forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), True)", "takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('USR2') for i in xrange(10): if", "run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2, takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import addition\\nresult =", "\"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('TERM') for", "False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll() is None) 
TS_ASSERT(forked.poll() is None) forked.kill('USR2') for", "= dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2, takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds", "strato.racktest.infra.suite import * from example_seeds import addition import time SIGNALLED_CALLABLE_CODE = \"\"\" import", "signalReceived: time.sleep(1) \"\"\" class Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1,", "def signalHandler(sigNum, _): global signalReceived signalReceived = sigNum signal.signal(signal.SIGUSR2, signalHandler) while not signalReceived:", "TS_ASSERT(forked.poll() is None) forked.kill() for i in xrange(10): if forked.poll() is None: time.sleep(1)", "time SIGNALLED_CALLABLE_CODE = \"\"\" import signal import time signalReceived = None def signalHandler(sigNum,", "\"\"\" class Test: HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2, takeSitePackages=True)[0],", "xrange(10): if forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(", "None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll() is None)", "LINE'\\n\" \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll()", "is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode( \"import time\\nwhile True:", "= host.it.seed.forkCode( \"import time\\ntime.sleep(3)\\n\" \"print 'OUTPUT LINE'\\n\" \"from example_seeds import addition\\nresult = addition.addition(2,", "addition.addition, 1, second=2, 
takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import addition\\nresult = addition.addition(2, second=3)\",", "None) TS_ASSERT(forked.poll() is None) TS_ASSERT_PREDICATE_TIMEOUT(forked.poll, TS_timeout=4) TS_ASSERT(forked.poll()) TS_ASSERT_EQUALS(forked.result(), 5) TS_ASSERT('OUTPUT LINE' in forked.output())", "None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\",", "HOSTS = dict(it=dict(rootfs=\"rootfs-basic\")) def run(self): TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2, takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from", "True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill() for i in", "xrange(10): if forked.poll() is None: time.sleep(1) else: break TS_ASSERT_EQUALS(forked.poll(), False) forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE,", "TS_ASSERT_EQUALS(host.it.seed.runCallable( addition.addition, 1, second=2, takeSitePackages=True)[0], 3) TS_ASSERT_EQUALS(host.it.seed.runCode( \"from example_seeds import addition\\nresult = addition.addition(2,", "host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('USR2') for i in xrange(10):", "host.it.seed.forkCode( \"import time\\nwhile True: time.sleep(2)\", takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('TERM')", "forked = host.it.seed.forkCode(SIGNALLED_CALLABLE_CODE, takeSitePackages=True) TS_ASSERT(forked.poll() is None) TS_ASSERT(forked.poll() is None) forked.kill('USR2') for i" ]
[ "self.get(route=route) or {} def get_permissions_all(self): \"\"\" Provide permission information for the current user.", "params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or", "= f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route)", "= f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or {} # Filter def get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\"", "or {} def get_property(self,key=None,permissionLevel=None): \"\"\" Returns an application property. :key: OPT :permissionLevel: OPT", "api_token = self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\" encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = {", "return self.get(route=route,params=params) or {} def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"] = transitionId route =", "if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route", "= f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {} # User def get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"]", "= \"rest/api/2/group/member\" return self.get(route=route,params=params) or {} # Issues --partial def get_issue(self,issueIdOrKey): route =", "required_string = f\"{email}:{api_token}\" encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = { 'Authorization': f\"Basic {encoded}\",", "{} def get_comment(self,issueIdOrKey,cId): route = 
f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or {} # Permissions def", "route = f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or {} def get_avatars(self,avtype,entityid): \"\"\" Returns the system", "{} # Audit Records def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a list of audit records.", "route = f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or {} # Filter def get_filter(self,fId): route =", "\"rest/api/2/auditing/record\" return self.get(route=route,params=params) or {} # Avatars def get_system_avatars_by_type(self,avtype): \"\"\" Returns a list", "file \"\"\" email = self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\" encoded =", "owner types are issue type, project, or user. :avtype: - avatar type \"\"\"", "The maximum number of results to return. \"\"\" params={} if(startat): params[\"startat\"] = startat", "'Content-Type': \"application/json\" } def get(self, route, params=None): \"\"\" Get the API Response \"\"\"", "self.get(route=route) or {} # Audit Records def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a list of", ":startat: - The number of records to skip before returning the first result.", "configparser import ConfigParser from constants import BASE_URL import requests import base64 class JiraAPI:", "= \"rest/api/2/mypermissions\" return self.get(route=route) or {} def get_permissions_all(self): \"\"\" Provide permission information for", "def __init__(self): \"\"\" Get the username and password from the secrets.ini file \"\"\"", "import requests import base64 class JiraAPI: headers={} base_url=BASE_URL @staticmethod def get_from_config(item): config =", "= maxResults route = f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {} # User def get_user(self,accountId=None):", "https://developer.atlassian.com/cloud/jira/platform/rest/v2/ 
# https://id.atlassian.com/manage/api-tokens - create the api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it", "params[\"transitionId\"] = transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={}", "Returns a list of audit records. :startat: - The number of records to", "of the item the avatar is associated with. \"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return", "startat if(maxresults): params[\"maxresults\"] = maxresults route = \"rest/api/2/auditing/record\" return self.get(route=route,params=params) or {} #", "if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params) or {} def get_comment(self,issueIdOrKey,cId):", "--partial def get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={}", "= f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or {} # Audit Records def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns", "before returning the first result. :maxresults: - The maximum number of results to", "the system and custom avatars for a project or issue type. 
:avtype: -", "the api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it from configparser import ConfigParser from", "- avatar type :entityid: - The ID of the item the avatar is", "\"\"\" Get the username and password from the secrets.ini file \"\"\" email =", "params[\"groupname\"] = groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"]", "= startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {}", "for a project or issue type. :avtype: - avatar type :entityid: - The", "permission information for the current user. \"\"\" route = \"rest/api/2/permissions\" return self.get(route=route) or", "property. :key: OPT :permissionLevel: OPT \"\"\" params={} if(key): params[\"key\"] = key if(permissionLevel): params[\"permissionLevel\"]", "self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\" encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers =", "key if(permissionLevel): params[\"permissionLevel\"] = permissionLevel route = \"rest/api/2/application-properties\" return self.get(route=route,params=params) # Projects --", "the current user. \"\"\" route = \"rest/api/2/mypermissions\" return self.get(route=route) or {} def get_permissions_all(self):", "get_application_roles_all(self): \"\"\" Returns all application roles. 
\"\"\" route = \"rest/api/2/applicationrole\" return self.get(route=route) or", "get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] = groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat): params[\"startat\"] = startat", "or {} # Issues --partial def get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or", "def get_my_filters(self): route = \"rest/api/2/filter/my\" return self.get(route=route) or {} # Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None):", "route = \"rest/api/2/applicationrole\" return self.get(route=route) or {} def get_application_roles(self,key): \"\"\" Returns an application", "maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"]", "owner type, where the owner types are issue type, project, or user. :avtype:", "get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"] = transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {}", "self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\" encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = { 'Authorization': f\"Basic", "= includeInactiveUsers if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = \"rest/api/2/group/member\"", "doing it from configparser import ConfigParser from constants import BASE_URL import requests import", "{} def get_permissions_all(self): \"\"\" Provide permission information for the current user. 
\"\"\" route", "Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] = groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat): params[\"startat\"]", "= f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or {} def get_avatars(self,avtype,entityid): \"\"\" Returns the system and", "ID of the item the avatar is associated with. \"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\"", "self.get(route=route) or {} # Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] = groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"]", "and custom avatars for a project or issue type. :avtype: - avatar type", "return self.get(route=route) or {} # Audit Records def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a list", "route = \"rest/api/2/filter/my\" return self.get(route=route) or {} # Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"]", "# Audit Records def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a list of audit records. 
:startat:", "self.get(route=route) or {} def get_my_filters(self): route = \"rest/api/2/filter/my\" return self.get(route=route) or {} #", "accid if(groupname): params[\"groupname\"] = groupname route = \"rest/api/2/dashboard/search\" return self.get(route=route,params=params) or {} def", "f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or", "return self.get(route=route,params=params) # Projects -- partial def get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route)", "information for the current user. \"\"\" route = \"rest/api/2/mypermissions\" return self.get(route=route) or {}", "{} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route", "user. \"\"\" route = \"rest/api/2/mypermissions\" return self.get(route=route) or {} def get_permissions_all(self): \"\"\" Provide", "\"\"\" Provide permission information for the current user. \"\"\" route = \"rest/api/2/permissions\" return", "try: return response.json() except: return None # Application roles def get_application_roles_all(self): \"\"\" Returns", "Returns an application roles. :key: - The key of the application role. 
\"\"\"", "= None if params is None: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, ) else:", "response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params ) # Return the response to get", "OPT \"\"\" params={} if(key): params[\"key\"] = key if(permissionLevel): params[\"permissionLevel\"] = permissionLevel route =", "# Reference : https://docs.atlassian.com/software/jira/docs/api/REST/8.5.3 # Reference : https://developer.atlassian.com/cloud/jira/platform/rest/v2/ # https://id.atlassian.com/manage/api-tokens - create the", "import ConfigParser from constants import BASE_URL import requests import base64 class JiraAPI: headers={}", "params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/project/search\" return", "def get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat):", "maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params) or {} def get_comment(self,issueIdOrKey,cId): route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\"", "def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"] = transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or", "= \"rest/api/2/application-properties\" return self.get(route=route,params=params) # Projects -- partial def get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\"", "= \"rest/api/2/filter/my\" return self.get(route=route) or {} # Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] =", ":entityid: - The ID of the item the avatar is associated with. 
\"\"\"", "route = \"rest/api/2/group/member\" return self.get(route=route,params=params) or {} # Issues --partial def get_issue(self,issueIdOrKey): route", "records. :startat: - The number of records to skip before returning the first", "\"\"\" params={} if(key): params[\"key\"] = key if(permissionLevel): params[\"permissionLevel\"] = permissionLevel route = \"rest/api/2/application-properties\"", "self.get(route=route) or {} # Permissions def get_my_permissions(self): \"\"\" Provide permission information for the", "Returns a list of system avatar details by owner type, where the owner", "get_from_config(item): config = ConfigParser() config.read('../secret.ini') try: return config.get('Jira',item) except: return None def __init__(self):", "a project or issue type. :avtype: - avatar type :entityid: - The ID", "params[\"maxResults\"] = maxresults route = \"rest/api/2/dashboard\" return self.get(route=route,params=params) or {} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={}", "class JiraAPI: headers={} base_url=BASE_URL @staticmethod def get_from_config(item): config = ConfigParser() config.read('../secret.ini') try: return", "{} # User def get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"] = accountId route = f\"rest/api/2/project/search\"", "API Response \"\"\" print(f\"{self.base_url}{route}\") response = None if params is None: response =", "the owner types are issue type, project, or user. 
:avtype: - avatar type", "self.get(route=route,params=params) or {} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"] = name if(accid): params[\"accountId\"] =", "it from configparser import ConfigParser from constants import BASE_URL import requests import base64", "None: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, ) else: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers,", "f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or {} # Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"] =", "type. :avtype: - avatar type :entityid: - The ID of the item the", "is associated with. \"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or {} # Dashboard", "or {} def get_comment(self,issueIdOrKey,cId): route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or {} # Permissions", "Reference : https://developer.atlassian.com/cloud/jira/platform/rest/v2/ # https://id.atlassian.com/manage/api-tokens - create the api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ -", "maximum number of results to return. \"\"\" params={} if(startat): params[\"startat\"] = startat if(maxresults):", "= permissionLevel route = \"rest/api/2/application-properties\" return self.get(route=route,params=params) # Projects -- partial def get_project(self,projectIdOrKey):", "audit records. :startat: - The number of records to skip before returning the", "- The number of records to skip before returning the first result. 
:maxresults:", "None if params is None: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, ) else: response", "or {} def get_avatars(self,avtype,entityid): \"\"\" Returns the system and custom avatars for a", "self.get(route=route) or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or {} def", "route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or {} def get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\" return", "= groupname route = \"rest/api/2/dashboard/search\" return self.get(route=route,params=params) or {} def get_dashboard_item_property_keys(self,dashboardId,itemId): route =", "None def __init__(self): \"\"\" Get the username and password from the secrets.ini file", "is None: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, ) else: response = requests.get( f\"{self.base_url}{route}\",", "# Return the response to get the required data try: return response.json() except:", "response = None if params is None: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, )", "of system avatar details by owner type, where the owner types are issue", "params={} if(transitionId): params[\"transitionId\"] = transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def", "search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"] = name if(accid): params[\"accountId\"] = accid if(groupname): params[\"groupname\"] =", "requests import base64 class JiraAPI: headers={} base_url=BASE_URL @staticmethod def get_from_config(item): config = ConfigParser()", "roles. 
\"\"\" route = \"rest/api/2/applicationrole\" return self.get(route=route) or {} def get_application_roles(self,key): \"\"\" Returns", "'Authorization': f\"Basic {encoded}\", 'Content-Type': \"application/json\" } def get(self, route, params=None): \"\"\" Get the", "params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_transitions(self,issueIdOrKey,transitionId=None): params={}", "def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a list of audit records. :startat: - The number", "return self.get(route=route,params=params) or {} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"] = name if(accid): params[\"accountId\"]", "or {} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"] = name if(accid): params[\"accountId\"] = accid", "route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"] =", "-- partial def get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or {} def get_all_projects(self,startAt=None,maxResults=None):", "startat if(maxResults): params[\"maxResults\"] = maxResults route = \"rest/api/2/group/member\" return self.get(route=route,params=params) or {} #", "\"\"\" Returns an application roles. :key: - The key of the application role.", "a list of audit records. :startat: - The number of records to skip", "get_comment(self,issueIdOrKey,cId): route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or {} # Permissions def get_my_permissions(self): \"\"\"", "an application property. 
:key: OPT :permissionLevel: OPT \"\"\" params={} if(key): params[\"key\"] = key", "required data try: return response.json() except: return None # Application roles def get_application_roles_all(self):", "f\"Basic {encoded}\", 'Content-Type': \"application/json\" } def get(self, route, params=None): \"\"\" Get the API", "\"\"\" Returns the system and custom avatars for a project or issue type.", "get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\"", "get_property(self,key=None,permissionLevel=None): \"\"\" Returns an application property. :key: OPT :permissionLevel: OPT \"\"\" params={} if(key):", "{ 'Authorization': f\"Basic {encoded}\", 'Content-Type': \"application/json\" } def get(self, route, params=None): \"\"\" Get", "f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or {} # Audit Records def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a", "\"application/json\" } def get(self, route, params=None): \"\"\" Get the API Response \"\"\" print(f\"{self.base_url}{route}\")", "response to get the required data try: return response.json() except: return None #", "route = \"rest/api/2/permissions\" return self.get(route=route) or {} def get_property(self,key=None,permissionLevel=None): \"\"\" Returns an application", "where the owner types are issue type, project, or user. :avtype: - avatar", "avatar is associated with. 
\"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or {} #", "Projects -- partial def get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or {} def", "def get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\" return self.get(route=route) or {} def get_my_filters(self): route =", "or {} def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"] = transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return", "maxResults route = f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {} # User def get_user(self,accountId=None): params={}", "config.get('Jira',item) except: return None def __init__(self): \"\"\" Get the username and password from", ": https://docs.atlassian.com/software/jira/docs/api/REST/8.5.3 # Reference : https://developer.atlassian.com/cloud/jira/platform/rest/v2/ # https://id.atlassian.com/manage/api-tokens - create the api token", "first result. :maxresults: - The maximum number of results to return. \"\"\" params={}", "route, params=None): \"\"\" Get the API Response \"\"\" print(f\"{self.base_url}{route}\") response = None if", "to skip before returning the first result. :maxresults: - The maximum number of", "token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it from configparser import ConfigParser from constants import", "number of records to skip before returning the first result. :maxresults: - The", "startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def", "- The ID of the item the avatar is associated with. 
\"\"\" route", "return self.get(route=route) or {} def get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or {}", "or {} # Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"] = startat if(maxresults): params[\"maxResults\"]", "get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or {} def get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\"", "if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_transitions(self,issueIdOrKey,transitionId=None):", "if(maxResults): params[\"maxResults\"] = maxResults route = \"rest/api/2/group/member\" return self.get(route=route,params=params) or {} # Issues", "def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] = groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat): params[\"startat\"] =", "JiraAPI: headers={} base_url=BASE_URL @staticmethod def get_from_config(item): config = ConfigParser() config.read('../secret.ini') try: return config.get('Jira',item)", "to get the required data try: return response.json() except: return None # Application", "Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"] = startat if(maxresults): params[\"maxResults\"] = maxresults route", "the current user. 
\"\"\" route = \"rest/api/2/permissions\" return self.get(route=route) or {} def get_property(self,key=None,permissionLevel=None):", "\"\"\" print(f\"{self.base_url}{route}\") response = None if params is None: response = requests.get( f\"{self.base_url}{route}\",", "return self.get(route=route,params=params) or {} # Avatars def get_system_avatars_by_type(self,avtype): \"\"\" Returns a list of", "f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or {} # Filter def get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\" return", "self.get(route=route) or {} def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] =", "the username and password from the secrets.ini file \"\"\" email = self.get_from_config(\"email\") api_token", "\"\"\" Provide permission information for the current user. \"\"\" route = \"rest/api/2/mypermissions\" return", "list of system avatar details by owner type, where the owner types are", "{} # Issues --partial def get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or {}", "# Reference : https://developer.atlassian.com/cloud/jira/platform/rest/v2/ # https://id.atlassian.com/manage/api-tokens - create the api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/", "type :entityid: - The ID of the item the avatar is associated with.", "return self.get(route=route,params=params) or {} def get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or {}", "Application roles def get_application_roles_all(self): \"\"\" Returns all application roles. 
\"\"\" route = \"rest/api/2/applicationrole\"", "params={} if(name): params[\"dashboardName\"] = name if(accid): params[\"accountId\"] = accid if(groupname): params[\"groupname\"] = groupname", "or {} def get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey):", "current user. \"\"\" route = \"rest/api/2/permissions\" return self.get(route=route) or {} def get_property(self,key=None,permissionLevel=None): \"\"\"", "The ID of the item the avatar is associated with. \"\"\" route =", "groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults", "get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or {} def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"]", "\"rest/api/2/application-properties\" return self.get(route=route,params=params) # Projects -- partial def get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\" return", "the first result. :maxresults: - The maximum number of results to return. \"\"\"", "get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a list of audit records. 
:startat: - The number of", "= maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId):", "self.get(route=route) or {} # Filter def get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\" return self.get(route=route) or", ") # Return the response to get the required data try: return response.json()", "__init__(self): \"\"\" Get the username and password from the secrets.ini file \"\"\" email", "records to skip before returning the first result. :maxresults: - The maximum number", "or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults", "return self.get(route=route) or {} # Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"] = startat", "if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params)", "The number of records to skip before returning the first result. 
:maxresults: -", "f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults):", "route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or {} # Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat):", "params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params) or {} def get_comment(self,issueIdOrKey,cId): route", "or {} # Permissions def get_my_permissions(self): \"\"\" Provide permission information for the current", "application roles. :key: - The key of the application role. \"\"\" route =", "{} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"] = name if(accid): params[\"accountId\"] = accid if(groupname):", "custom avatars for a project or issue type. :avtype: - avatar type :entityid:", "return self.get(route=route) or {} def get_my_filters(self): route = \"rest/api/2/filter/my\" return self.get(route=route) or {}", "from configparser import ConfigParser from constants import BASE_URL import requests import base64 class", "avatars for a project or issue type. 
:avtype: - avatar type :entityid: -", "partial def get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or {} def get_all_projects(self,startAt=None,maxResults=None): params={}", "params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return", "or {} # Filter def get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\" return self.get(route=route) or {}", ":avtype: - avatar type \"\"\" route = f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or {} def", "the required data try: return response.json() except: return None # Application roles def", "route = f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] =", "route = f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or {} def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] =", "import base64 class JiraAPI: headers={} base_url=BASE_URL @staticmethod def get_from_config(item): config = ConfigParser() config.read('../secret.ini')", "- create the api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it from configparser import", "of records to skip before returning the first result. 
:maxresults: - The maximum", "return self.get(route=route) or {} def get_avatars(self,avtype,entityid): \"\"\" Returns the system and custom avatars", "or {} # Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] = groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] =", "def get_system_avatars_by_type(self,avtype): \"\"\" Returns a list of system avatar details by owner type,", "return None # Application roles def get_application_roles_all(self): \"\"\" Returns all application roles. \"\"\"", "headers=self.headers, ) else: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params ) # Return the", "\"\"\" route = f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or {} def get_avatars(self,avtype,entityid): \"\"\" Returns the", "= \"rest/api/2/permissions\" return self.get(route=route) or {} def get_property(self,key=None,permissionLevel=None): \"\"\" Returns an application property.", "get_avatars(self,avtype,entityid): \"\"\" Returns the system and custom avatars for a project or issue", "self.get(route=route) or {} def get_avatars(self,avtype,entityid): \"\"\" Returns the system and custom avatars for", "f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or {} def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults):", "get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\"", "def get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"] = accountId route = f\"rest/api/2/project/search\" return self.get(route=route,params=params) or", "params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = 
f\"rest/api/2/project/search\" return self.get(route=route,params=params) or", "= key if(permissionLevel): params[\"permissionLevel\"] = permissionLevel route = \"rest/api/2/application-properties\" return self.get(route=route,params=params) # Projects", "{} def get_avatars(self,avtype,entityid): \"\"\" Returns the system and custom avatars for a project", "get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"]", "= transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat):", "avatar type \"\"\" route = f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or {} def get_avatars(self,avtype,entityid): \"\"\"", "= f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or {} def get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\" return self.get(route=route)", "route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return", "encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = { 'Authorization': f\"Basic {encoded}\", 'Content-Type': \"application/json\" }", "get_my_filters(self): route = \"rest/api/2/filter/my\" return self.get(route=route) or {} # Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={}", "} def get(self, route, params=None): \"\"\" Get the API Response \"\"\" print(f\"{self.base_url}{route}\") response", "startat if(maxResults): params[\"maxResults\"] = 
maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params) or {} def", "Provide permission information for the current user. \"\"\" route = \"rest/api/2/permissions\" return self.get(route=route)", "\"\"\" route = \"rest/api/2/mypermissions\" return self.get(route=route) or {} def get_permissions_all(self): \"\"\" Provide permission", "if params is None: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, ) else: response =", "skip before returning the first result. :maxresults: - The maximum number of results", "@staticmethod def get_from_config(item): config = ConfigParser() config.read('../secret.ini') try: return config.get('Jira',item) except: return None", "- The key of the application role. \"\"\" route = f\"rest/api/2/applicationrole/{key}\" return self.get(route=route)", "or {} # Avatars def get_system_avatars_by_type(self,avtype): \"\"\" Returns a list of system avatar", "except: return None def __init__(self): \"\"\" Get the username and password from the", "Get the username and password from the secrets.ini file \"\"\" email = self.get_from_config(\"email\")", "\"\"\" email = self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\" encoded = base64.b64encode(", "self.get(route=route) or {} def get_application_roles(self,key): \"\"\" Returns an application roles. :key: - The", "params={} if(startat): params[\"startat\"] = startat if(maxresults): params[\"maxresults\"] = maxresults route = \"rest/api/2/auditing/record\" return", "self.get(route=route) or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] =", "params={} params[\"groupname\"] = groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat): params[\"startat\"] = startat if(maxResults):", "roles. 
:key: - The key of the application role. \"\"\" route = f\"rest/api/2/applicationrole/{key}\"", "# Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"] = startat if(maxresults): params[\"maxResults\"] = maxresults", "list of audit records. :startat: - The number of records to skip before", "or {} # User def get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"] = accountId route =", "- The maximum number of results to return. \"\"\" params={} if(startat): params[\"startat\"] =", "= \"rest/api/2/dashboard\" return self.get(route=route,params=params) or {} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"] = name", "Returns all application roles. \"\"\" route = \"rest/api/2/applicationrole\" return self.get(route=route) or {} def", "key of the application role. \"\"\" route = f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or {}", "maxresults route = \"rest/api/2/dashboard\" return self.get(route=route,params=params) or {} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"]", "def get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route =", "get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/project/search\"", "= \"rest/api/2/dashboard/search\" return self.get(route=route,params=params) or {} def get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route)", "= startat if(maxResults): params[\"maxResults\"] = maxResults route = 
\"rest/api/2/group/member\" return self.get(route=route,params=params) or {}", "headers={} base_url=BASE_URL @staticmethod def get_from_config(item): config = ConfigParser() config.read('../secret.ini') try: return config.get('Jira',item) except:", "def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"] = startat if(maxresults): params[\"maxResults\"] = maxresults route =", "= groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] =", "\"\"\" route = \"rest/api/2/applicationrole\" return self.get(route=route) or {} def get_application_roles(self,key): \"\"\" Returns an", "= maxresults route = \"rest/api/2/auditing/record\" return self.get(route=route,params=params) or {} # Avatars def get_system_avatars_by_type(self,avtype):", "except: return None # Application roles def get_application_roles_all(self): \"\"\" Returns all application roles.", "Records def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a list of audit records. 
:startat: - The", "= ConfigParser() config.read('../secret.ini') try: return config.get('Jira',item) except: return None def __init__(self): \"\"\" Get", "Issues --partial def get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None):", "def get_comment(self,issueIdOrKey,cId): route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or {} # Permissions def get_my_permissions(self):", "password from the secrets.ini file \"\"\" email = self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\") required_string", "route = \"rest/api/2/dashboard\" return self.get(route=route,params=params) or {} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"] =", "api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it from configparser import ConfigParser from constants", "type, project, or user. :avtype: - avatar type \"\"\" route = f\"rest/api/2/avatar/{avtype}/system\" return", ":maxresults: - The maximum number of results to return. \"\"\" params={} if(startat): params[\"startat\"]", "name if(accid): params[\"accountId\"] = accid if(groupname): params[\"groupname\"] = groupname route = \"rest/api/2/dashboard/search\" return", "return self.get(route=route) or {} # Filter def get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\" return self.get(route=route)", "Filter def get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\" return self.get(route=route) or {} def get_my_filters(self): route", "self.get(route=route,params=params) or {} def get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or {} def", "returning the first result. 
:maxresults: - The maximum number of results to return.", "ConfigParser() config.read('../secret.ini') try: return config.get('Jira',item) except: return None def __init__(self): \"\"\" Get the", "response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, ) else: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params", "print(f\"{self.base_url}{route}\") response = None if params is None: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers,", "= f\"{email}:{api_token}\" encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = { 'Authorization': f\"Basic {encoded}\", 'Content-Type':", "import BASE_URL import requests import base64 class JiraAPI: headers={} base_url=BASE_URL @staticmethod def get_from_config(item):", "Returns the system and custom avatars for a project or issue type. :avtype:", "issue type. :avtype: - avatar type :entityid: - The ID of the item", "params[\"maxResults\"] = maxResults route = \"rest/api/2/group/member\" return self.get(route=route,params=params) or {} # Issues --partial", "return self.get(route=route,params=params) or {} def get_comment(self,issueIdOrKey,cId): route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or {}", "\"\"\" Returns an application property. :key: OPT :permissionLevel: OPT \"\"\" params={} if(key): params[\"key\"]", "def get_application_roles(self,key): \"\"\" Returns an application roles. :key: - The key of the", "def get_application_roles_all(self): \"\"\" Returns all application roles. \"\"\" route = \"rest/api/2/applicationrole\" return self.get(route=route)", "get the required data try: return response.json() except: return None # Application roles", ":key: - The key of the application role. \"\"\" route = f\"rest/api/2/applicationrole/{key}\" return", "return. 
\"\"\" params={} if(startat): params[\"startat\"] = startat if(maxresults): params[\"maxresults\"] = maxresults route =", "data try: return response.json() except: return None # Application roles def get_application_roles_all(self): \"\"\"", "Provide permission information for the current user. \"\"\" route = \"rest/api/2/mypermissions\" return self.get(route=route)", "username and password from the secrets.ini file \"\"\" email = self.get_from_config(\"email\") api_token =", "def get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or {} # Filter def get_filter(self,fId):", "return self.get(route=route,params=params) or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"]", "{} # Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"] = startat if(maxresults): params[\"maxResults\"] =", "config.read('../secret.ini') try: return config.get('Jira',item) except: return None def __init__(self): \"\"\" Get the username", "- doing it from configparser import ConfigParser from constants import BASE_URL import requests", "= name if(accid): params[\"accountId\"] = accid if(groupname): params[\"groupname\"] = groupname route = \"rest/api/2/dashboard/search\"", "{} # Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] = groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers", "route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or {} # Permissions def get_my_permissions(self): \"\"\" Provide", "get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or {} # Filter def get_filter(self,fId): route", "OPT :permissionLevel: OPT \"\"\" params={} if(key): params[\"key\"] = key 
if(permissionLevel): params[\"permissionLevel\"] = permissionLevel", "params[\"permissionLevel\"] = permissionLevel route = \"rest/api/2/application-properties\" return self.get(route=route,params=params) # Projects -- partial def", "required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = { 'Authorization': f\"Basic {encoded}\", 'Content-Type': \"application/json\" } def get(self, route,", "project, or user. :avtype: - avatar type \"\"\" route = f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route)", "params[\"startat\"] = startat if(maxresults): params[\"maxresults\"] = maxresults route = \"rest/api/2/auditing/record\" return self.get(route=route,params=params) or", "if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params)", "user. \"\"\" route = \"rest/api/2/permissions\" return self.get(route=route) or {} def get_property(self,key=None,permissionLevel=None): \"\"\" Returns", "{} def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route", "\"\"\" route = f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or {} # Audit Records def get_audit_records(self,startat=None,maxresults=None):", "number of results to return. 
\"\"\" params={} if(startat): params[\"startat\"] = startat if(maxresults): params[\"maxresults\"]", "\"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or {} # Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={}", "f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or {} def get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or", "type, where the owner types are issue type, project, or user. :avtype: -", "= f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat", "associated with. \"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or {} # Dashboard def", "results to return. \"\"\" params={} if(startat): params[\"startat\"] = startat if(maxresults): params[\"maxresults\"] = maxresults", "def get_my_permissions(self): \"\"\" Provide permission information for the current user. \"\"\" route =", "BASE_URL import requests import base64 class JiraAPI: headers={} base_url=BASE_URL @staticmethod def get_from_config(item): config", "get_system_avatars_by_type(self,avtype): \"\"\" Returns a list of system avatar details by owner type, where", "return self.get(route=route) or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or {}", "current user. 
\"\"\" route = \"rest/api/2/mypermissions\" return self.get(route=route) or {} def get_permissions_all(self): \"\"\"", "from the secrets.ini file \"\"\" email = self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\") required_string =", "= f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"] = transitionId", "{} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route", "\"rest/api/2/mypermissions\" return self.get(route=route) or {} def get_permissions_all(self): \"\"\" Provide permission information for the", "the API Response \"\"\" print(f\"{self.base_url}{route}\") response = None if params is None: response", "if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/project/search\" return self.get(route=route,params=params)", "maxResults route = \"rest/api/2/group/member\" return self.get(route=route,params=params) or {} # Issues --partial def get_issue(self,issueIdOrKey):", "application property. :key: OPT :permissionLevel: OPT \"\"\" params={} if(key): params[\"key\"] = key if(permissionLevel):", "params[\"maxResults\"] = maxResults route = f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {} # User def", "a list of system avatar details by owner type, where the owner types", "get_permissions_all(self): \"\"\" Provide permission information for the current user. 
\"\"\" route = \"rest/api/2/permissions\"", "if(name): params[\"dashboardName\"] = name if(accid): params[\"accountId\"] = accid if(groupname): params[\"groupname\"] = groupname route", "\"\"\" Get the API Response \"\"\" print(f\"{self.base_url}{route}\") response = None if params is", "self.get(route=route,params=params) or {} def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"] = transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\"", "route = \"rest/api/2/auditing/record\" return self.get(route=route,params=params) or {} # Avatars def get_system_avatars_by_type(self,avtype): \"\"\" Returns", "= self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\" encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers", "# User def get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"] = accountId route = f\"rest/api/2/project/search\" return", "f\"{self.base_url}{route}\", headers=self.headers, ) else: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params ) # Return", "= requests.get( f\"{self.base_url}{route}\", headers=self.headers, ) else: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params )", "{} # Filter def get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\" return self.get(route=route) or {} def", "# Issues --partial def get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or {} def", "params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return", ":permissionLevel: OPT \"\"\" params={} if(key): params[\"key\"] = key if(permissionLevel): params[\"permissionLevel\"] = permissionLevel route", "= \"rest/api/2/applicationrole\" return 
self.get(route=route) or {} def get_application_roles(self,key): \"\"\" Returns an application roles.", "= f\"rest/api/2/filter/{fId}\" return self.get(route=route) or {} def get_my_filters(self): route = \"rest/api/2/filter/my\" return self.get(route=route)", "startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {} #", "return self.get(route=route) or {} def get_application_roles(self,key): \"\"\" Returns an application roles. :key: -", "information for the current user. \"\"\" route = \"rest/api/2/permissions\" return self.get(route=route) or {}", "Permissions def get_my_permissions(self): \"\"\" Provide permission information for the current user. \"\"\" route", "application roles. \"\"\" route = \"rest/api/2/applicationrole\" return self.get(route=route) or {} def get_application_roles(self,key): \"\"\"", "{} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or {} def get_dashboard(self,dId): route", "params={} if(startat): params[\"startAt\"] = startat if(maxresults): params[\"maxResults\"] = maxresults route = \"rest/api/2/dashboard\" return", "= startat if(maxresults): params[\"maxresults\"] = maxresults route = \"rest/api/2/auditing/record\" return self.get(route=route,params=params) or {}", "https://docs.atlassian.com/software/jira/docs/api/REST/8.5.3 # Reference : https://developer.atlassian.com/cloud/jira/platform/rest/v2/ # https://id.atlassian.com/manage/api-tokens - create the api token #", "= \"rest/api/2/auditing/record\" return self.get(route=route,params=params) or {} # Avatars def get_system_avatars_by_type(self,avtype): \"\"\" Returns a", "= startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {}", "{} def 
get_property(self,key=None,permissionLevel=None): \"\"\" Returns an application property. :key: OPT :permissionLevel: OPT \"\"\"", "route = f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {} # User def get_user(self,accountId=None): params={} if(accountId):", "if(maxresults): params[\"maxresults\"] = maxresults route = \"rest/api/2/auditing/record\" return self.get(route=route,params=params) or {} # Avatars", "route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] =", "get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"] = accountId route = f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {}", "if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = \"rest/api/2/group/member\" return self.get(route=route,params=params)", "transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"]", "\"rest/api/2/applicationrole\" return self.get(route=route) or {} def get_application_roles(self,key): \"\"\" Returns an application roles. 
:key:", "User def get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"] = accountId route = f\"rest/api/2/project/search\" return self.get(route=route,params=params)", "return self.get(route=route,params=params) or {} # User def get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"] = accountId", "def get_from_config(item): config = ConfigParser() config.read('../secret.ini') try: return config.get('Jira',item) except: return None def", "ConfigParser from constants import BASE_URL import requests import base64 class JiraAPI: headers={} base_url=BASE_URL", "= self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\" encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = { 'Authorization':", "Return the response to get the required data try: return response.json() except: return", "\"rest/api/2/group/member\" return self.get(route=route,params=params) or {} # Issues --partial def get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\"", "the item the avatar is associated with. 
\"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route)", "self.headers = { 'Authorization': f\"Basic {encoded}\", 'Content-Type': \"application/json\" } def get(self, route, params=None):", "# https://id.atlassian.com/manage/api-tokens - create the api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it from", "self.get(route=route) or {} def get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or {} #", ": https://developer.atlassian.com/cloud/jira/platform/rest/v2/ # https://id.atlassian.com/manage/api-tokens - create the api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing", "# Filter def get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\" return self.get(route=route) or {} def get_my_filters(self):", "system and custom avatars for a project or issue type. :avtype: - avatar", "def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"] = name if(accid): params[\"accountId\"] = accid if(groupname): params[\"groupname\"]", "def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or {} def get_dashboard(self,dId): route =", "# https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it from configparser import ConfigParser from constants import BASE_URL", "self.get(route=route,params=params) or {} def get_comment(self,issueIdOrKey,cId): route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or {} #", "params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params) or", "route = \"rest/api/2/mypermissions\" return 
self.get(route=route) or {} def get_permissions_all(self): \"\"\" Provide permission information", "the secrets.ini file \"\"\" email = self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\"", "params=None): \"\"\" Get the API Response \"\"\" print(f\"{self.base_url}{route}\") response = None if params", "= maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params) or {} def get_comment(self,issueIdOrKey,cId): route =", "{} def get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or {} # Filter def", "if(transitionId): params[\"transitionId\"] = transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None):", "f\"{email}:{api_token}\" encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = { 'Authorization': f\"Basic {encoded}\", 'Content-Type': \"application/json\"", "of the application role. \"\"\" route = f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or {} #", "if(groupname): params[\"groupname\"] = groupname route = \"rest/api/2/dashboard/search\" return self.get(route=route,params=params) or {} def get_dashboard_item_property_keys(self,dashboardId,itemId):", "# Application roles def get_application_roles_all(self): \"\"\" Returns all application roles. \"\"\" route =", "\"\"\" params={} if(startat): params[\"startat\"] = startat if(maxresults): params[\"maxresults\"] = maxresults route = \"rest/api/2/auditing/record\"", "if(permissionLevel): params[\"permissionLevel\"] = permissionLevel route = \"rest/api/2/application-properties\" return self.get(route=route,params=params) # Projects -- partial", "project or issue type. 
:avtype: - avatar type :entityid: - The ID of", "or {} def get_permissions_all(self): \"\"\" Provide permission information for the current user. \"\"\"", "self.get(route=route) or {} def get_property(self,key=None,permissionLevel=None): \"\"\" Returns an application property. :key: OPT :permissionLevel:", ") else: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params ) # Return the response", "of audit records. :startat: - The number of records to skip before returning", "role. \"\"\" route = f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or {} # Audit Records def", "= f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or {} def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat", "application role. \"\"\" route = f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or {} # Audit Records", "params={} if(key): params[\"key\"] = key if(permissionLevel): params[\"permissionLevel\"] = permissionLevel route = \"rest/api/2/application-properties\" return", "Avatars def get_system_avatars_by_type(self,avtype): \"\"\" Returns a list of system avatar details by owner", "Reference : https://docs.atlassian.com/software/jira/docs/api/REST/8.5.3 # Reference : https://developer.atlassian.com/cloud/jira/platform/rest/v2/ # https://id.atlassian.com/manage/api-tokens - create the api", "try: return config.get('Jira',item) except: return None def __init__(self): \"\"\" Get the username and", "self.get(route=route,params=params) or {} # Issues --partial def get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route)", "an application roles. :key: - The key of the application role. \"\"\" route", "{} def get_application_roles(self,key): \"\"\" Returns an application roles. 
:key: - The key of", "avatar type :entityid: - The ID of the item the avatar is associated", "from constants import BASE_URL import requests import base64 class JiraAPI: headers={} base_url=BASE_URL @staticmethod", "def get_property(self,key=None,permissionLevel=None): \"\"\" Returns an application property. :key: OPT :permissionLevel: OPT \"\"\" params={}", "{encoded}\", 'Content-Type': \"application/json\" } def get(self, route, params=None): \"\"\" Get the API Response", "self.get(route=route,params=params) or {} # Avatars def get_system_avatars_by_type(self,avtype): \"\"\" Returns a list of system", "are issue type, project, or user. :avtype: - avatar type \"\"\" route =", "or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\" return self.get(route=route) or {} def get_dashboard(self,dId):", "result. :maxresults: - The maximum number of results to return. \"\"\" params={} if(startat):", "type \"\"\" route = f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or {} def get_avatars(self,avtype,entityid): \"\"\" Returns", "self.get(route=route) or {} # Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"] = startat if(maxresults):", "params[\"key\"] = key if(permissionLevel): params[\"permissionLevel\"] = permissionLevel route = \"rest/api/2/application-properties\" return self.get(route=route,params=params) #", "return self.get(route=route) or {} # Permissions def get_my_permissions(self): \"\"\" Provide permission information for", "params[\"accountId\"] = accid if(groupname): params[\"groupname\"] = groupname route = \"rest/api/2/dashboard/search\" return self.get(route=route,params=params) or", "the response to get the required data try: return response.json() except: return None", "params[\"maxresults\"] = maxresults route = \"rest/api/2/auditing/record\" return 
self.get(route=route,params=params) or {} # Avatars def", "= f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or {} # Permissions def get_my_permissions(self): \"\"\" Provide permission", "or {} def get_application_roles(self,key): \"\"\" Returns an application roles. :key: - The key", "def get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or {} def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat):", "return response.json() except: return None # Application roles def get_application_roles_all(self): \"\"\" Returns all", "= maxresults route = \"rest/api/2/dashboard\" return self.get(route=route,params=params) or {} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name):", "route = f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or {} # Audit Records def get_audit_records(self,startat=None,maxresults=None): \"\"\"", "params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route =", "and password from the secrets.ini file \"\"\" email = self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\")", "item the avatar is associated with. \"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or", "get_filter(self,fId): route = f\"rest/api/2/filter/{fId}\" return self.get(route=route) or {} def get_my_filters(self): route = \"rest/api/2/filter/my\"", "or {} # Audit Records def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a list of audit", "\"rest/api/2/permissions\" return self.get(route=route) or {} def get_property(self,key=None,permissionLevel=None): \"\"\" Returns an application property. 
:key:", "return self.get(route=route,params=params) or {} # Issues --partial def get_issue(self,issueIdOrKey): route = f\"rest/api/2/issue/{issueIdOrKey}\" return", "route = \"rest/api/2/dashboard/search\" return self.get(route=route,params=params) or {} def get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return", "Get the API Response \"\"\" print(f\"{self.base_url}{route}\") response = None if params is None:", "to return. \"\"\" params={} if(startat): params[\"startat\"] = startat if(maxresults): params[\"maxresults\"] = maxresults route", "create the api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it from configparser import ConfigParser", "permissionLevel route = \"rest/api/2/application-properties\" return self.get(route=route,params=params) # Projects -- partial def get_project(self,projectIdOrKey): route", "params[\"dashboardName\"] = name if(accid): params[\"accountId\"] = accid if(groupname): params[\"groupname\"] = groupname route =", "None # Application roles def get_application_roles_all(self): \"\"\" Returns all application roles. \"\"\" route", "Audit Records def get_audit_records(self,startat=None,maxresults=None): \"\"\" Returns a list of audit records. 
:startat: -", "= accid if(groupname): params[\"groupname\"] = groupname route = \"rest/api/2/dashboard/search\" return self.get(route=route,params=params) or {}", "= maxResults route = \"rest/api/2/group/member\" return self.get(route=route,params=params) or {} # Issues --partial def", "base64 class JiraAPI: headers={} base_url=BASE_URL @staticmethod def get_from_config(item): config = ConfigParser() config.read('../secret.ini') try:", "# Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] = groupname if(includeInactiveUsers): params[\"includeInactiveUsers\"] = includeInactiveUsers if(startat):", "email = self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\" encoded = base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\")", "or {} def get_dashboard(self,dId): route = f\"rest/api/2/dashboard/{dId}\" return self.get(route=route) or {} # Filter", "get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties/{propertyKey}\"", "roles def get_application_roles_all(self): \"\"\" Returns all application roles. \"\"\" route = \"rest/api/2/applicationrole\" return", "Response \"\"\" print(f\"{self.base_url}{route}\") response = None if params is None: response = requests.get(", "f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or {} # Permissions def get_my_permissions(self): \"\"\" Provide permission information", "\"\"\" Returns a list of audit records. :startat: - The number of records", "for the current user. 
\"\"\" route = \"rest/api/2/permissions\" return self.get(route=route) or {} def", "or {} def get_my_filters(self): route = \"rest/api/2/filter/my\" return self.get(route=route) or {} # Groups", "if(startat): params[\"startAt\"] = startat if(maxresults): params[\"maxResults\"] = maxresults route = \"rest/api/2/dashboard\" return self.get(route=route,params=params)", "of results to return. \"\"\" params={} if(startat): params[\"startat\"] = startat if(maxresults): params[\"maxresults\"] =", "get_application_roles(self,key): \"\"\" Returns an application roles. :key: - The key of the application", "groupname route = \"rest/api/2/dashboard/search\" return self.get(route=route,params=params) or {} def get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\"", "return self.get(route=route) or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"]", "self.get(route=route,params=params) # Projects -- partial def get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or", "the application role. \"\"\" route = f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or {} # Audit", "def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route =", "f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {} # User def get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"] =", "or user. 
:avtype: - avatar type \"\"\" route = f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or", "https://id.atlassian.com/manage/api-tokens - create the api token # https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it from configparser", "= startat if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params) or {}", "= { 'Authorization': f\"Basic {encoded}\", 'Content-Type': \"application/json\" } def get(self, route, params=None): \"\"\"", "permission information for the current user. \"\"\" route = \"rest/api/2/mypermissions\" return self.get(route=route) or", "if(accid): params[\"accountId\"] = accid if(groupname): params[\"groupname\"] = groupname route = \"rest/api/2/dashboard/search\" return self.get(route=route,params=params)", "requests.get( f\"{self.base_url}{route}\", headers=self.headers, ) else: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params ) #", "params[\"groupname\"] = groupname route = \"rest/api/2/dashboard/search\" return self.get(route=route,params=params) or {} def get_dashboard_item_property_keys(self,dashboardId,itemId): route", "# Projects -- partial def get_project(self,projectIdOrKey): route = f\"rest/api/2/project/{projectIdOrKey}\" return self.get(route=route) or {}", "if(startat): params[\"startat\"] = startat if(maxresults): params[\"maxresults\"] = maxresults route = \"rest/api/2/auditing/record\" return self.get(route=route,params=params)", "= startat if(maxresults): params[\"maxResults\"] = maxresults route = \"rest/api/2/dashboard\" return self.get(route=route,params=params) or {}", "f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params) or {} def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"] = transitionId route", "\"rest/api/2/dashboard/search\" return 
self.get(route=route,params=params) or {} def get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or", "= base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = { 'Authorization': f\"Basic {encoded}\", 'Content-Type': \"application/json\" } def", "if(key): params[\"key\"] = key if(permissionLevel): params[\"permissionLevel\"] = permissionLevel route = \"rest/api/2/application-properties\" return self.get(route=route,params=params)", "\"\"\" route = \"rest/api/2/permissions\" return self.get(route=route) or {} def get_property(self,key=None,permissionLevel=None): \"\"\" Returns an", "https://developer.atlassian.com/cloud/jira/platform/basic-auth-for-rest-apis/ - doing it from configparser import ConfigParser from constants import BASE_URL import", "# Permissions def get_my_permissions(self): \"\"\" Provide permission information for the current user. \"\"\"", "= requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params ) # Return the response to get the", "params is None: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, ) else: response = requests.get(", "f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or {} def get_avatars(self,avtype,entityid): \"\"\" Returns the system and custom", "\"rest/api/2/dashboard\" return self.get(route=route,params=params) or {} def search_for_dashboards(self,name=None,accid=None,groupname=None): params={} if(name): params[\"dashboardName\"] = name if(accid):", "avatar details by owner type, where the owner types are issue type, project,", ":avtype: - avatar type :entityid: - The ID of the item the avatar", "route = f\"rest/api/2/filter/{fId}\" return self.get(route=route) or {} def get_my_filters(self): route = \"rest/api/2/filter/my\" return", "user. 
:avtype: - avatar type \"\"\" route = f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or {}", "constants import BASE_URL import requests import base64 class JiraAPI: headers={} base_url=BASE_URL @staticmethod def", "get(self, route, params=None): \"\"\" Get the API Response \"\"\" print(f\"{self.base_url}{route}\") response = None", "get_my_permissions(self): \"\"\" Provide permission information for the current user. \"\"\" route = \"rest/api/2/mypermissions\"", "requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params ) # Return the response to get the required", "or issue type. :avtype: - avatar type :entityid: - The ID of the", "def get_avatars(self,avtype,entityid): \"\"\" Returns the system and custom avatars for a project or", "def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route =", "else: response = requests.get( f\"{self.base_url}{route}\", headers=self.headers, params=params ) # Return the response to", "{} # Permissions def get_my_permissions(self): \"\"\" Provide permission information for the current user.", "return self.get(route=route) or {} def get_permissions_all(self): \"\"\" Provide permission information for the current", ":key: OPT :permissionLevel: OPT \"\"\" params={} if(key): params[\"key\"] = key if(permissionLevel): params[\"permissionLevel\"] =", "{} def get_dashboard_item_property_keys(self,dashboardId,itemId): route = f\"rest/api/2/dashboard/{dashboardId}/items/{itemId}/properties\" return self.get(route=route) or {} def get_dashboard_item_property(self,dashboardId,itemId,propertyKey): route", "{} def get_transitions(self,issueIdOrKey,transitionId=None): params={} if(transitionId): params[\"transitionId\"] = transitionId route = f\"rest/api/2/issue/{issueIdOrKey}/changelog\" return self.get(route=route,params=params)", "= f\"rest/api/2/issue/{issueIdOrKey}/comments\" 
return self.get(route=route,params=params) or {} def get_comment(self,issueIdOrKey,cId): route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route)", "Returns an application property. :key: OPT :permissionLevel: OPT \"\"\" params={} if(key): params[\"key\"] =", "details by owner type, where the owner types are issue type, project, or", "if(maxresults): params[\"maxResults\"] = maxresults route = \"rest/api/2/dashboard\" return self.get(route=route,params=params) or {} def search_for_dashboards(self,name=None,accid=None,groupname=None):", "route = f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params) or {} def get_comment(self,issueIdOrKey,cId): route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return", "system avatar details by owner type, where the owner types are issue type,", "return self.get(route=route) or {} def get_property(self,key=None,permissionLevel=None): \"\"\" Returns an application property. :key: OPT", "return self.get(route=route) or {} # Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] = groupname if(includeInactiveUsers):", "f\"{self.base_url}{route}\", headers=self.headers, params=params ) # Return the response to get the required data", "headers=self.headers, params=params ) # Return the response to get the required data try:", "{} # Avatars def get_system_avatars_by_type(self,avtype): \"\"\" Returns a list of system avatar details", "base64.b64encode( required_string.encode(\"utf-8\")).decode(\"utf-8\") self.headers = { 'Authorization': f\"Basic {encoded}\", 'Content-Type': \"application/json\" } def get(self,", "def get_permissions_all(self): \"\"\" Provide permission information for the current user. \"\"\" route =", "\"\"\" Returns all application roles. 
\"\"\" route = \"rest/api/2/applicationrole\" return self.get(route=route) or {}", "{} def get_my_filters(self): route = \"rest/api/2/filter/my\" return self.get(route=route) or {} # Groups def", "self.get(route=route,params=params) or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] =", "all application roles. \"\"\" route = \"rest/api/2/applicationrole\" return self.get(route=route) or {} def get_application_roles(self,key):", "return self.get(route=route) or {} def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"]", "The key of the application role. \"\"\" route = f\"rest/api/2/applicationrole/{key}\" return self.get(route=route) or", "= f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat", "or {} def get_comments(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults", "def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route =", "= f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or {} # Dashboard def get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"]", "config = ConfigParser() config.read('../secret.ini') try: return config.get('Jira',item) except: return None def __init__(self): \"\"\"", "self.get(route=route,params=params) or {} # User def get_user(self,accountId=None): params={} if(accountId): params[\"accountId\"] = accountId route", "the avatar is associated with. 
\"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or {}", "params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = \"rest/api/2/group/member\" return self.get(route=route,params=params) or", "base_url=BASE_URL @staticmethod def get_from_config(item): config = ConfigParser() config.read('../secret.ini') try: return config.get('Jira',item) except: return", "f\"rest/api/2/issue/{issueIdOrKey}\" return self.get(route=route) or {} def get_changelogs(self,issueIdOrKey,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults):", "includeInactiveUsers if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults route = \"rest/api/2/group/member\" return", "route = \"rest/api/2/application-properties\" return self.get(route=route,params=params) # Projects -- partial def get_project(self,projectIdOrKey): route =", "# Avatars def get_system_avatars_by_type(self,avtype): \"\"\" Returns a list of system avatar details by", "or {} def get_all_projects(self,startAt=None,maxResults=None): params={} if(startat): params[\"startat\"] = startat if(maxResults): params[\"maxResults\"] = maxResults", "maxresults route = \"rest/api/2/auditing/record\" return self.get(route=route,params=params) or {} # Avatars def get_system_avatars_by_type(self,avtype): \"\"\"", "return None def __init__(self): \"\"\" Get the username and password from the secrets.ini", "\"\"\" Returns a list of system avatar details by owner type, where the", "with. \"\"\" route = f\"rest/api/2/universal_avatar/type/{avtype}/owner/{entityid}\" return self.get(route=route) or {} # Dashboard def get_all_dashboards(self,startat=None,maxresults=None):", "return config.get('Jira',item) except: return None def __init__(self): \"\"\" Get the username and password", "issue type, project, or user. 
:avtype: - avatar type \"\"\" route = f\"rest/api/2/avatar/{avtype}/system\"", "\"rest/api/2/filter/my\" return self.get(route=route) or {} # Groups def get_users_from_group(self,groupname,includeInactiveUsers=None,startAt=None,maxResults=None): params={} params[\"groupname\"] = groupname", "startat if(maxresults): params[\"maxResults\"] = maxresults route = \"rest/api/2/dashboard\" return self.get(route=route,params=params) or {} def", "get_all_dashboards(self,startat=None,maxresults=None): params={} if(startat): params[\"startAt\"] = startat if(maxresults): params[\"maxResults\"] = maxresults route = \"rest/api/2/dashboard\"", "secrets.ini file \"\"\" email = self.get_from_config(\"email\") api_token = self.get_from_config(\"api_token\") required_string = f\"{email}:{api_token}\" encoded", "if(maxResults): params[\"maxResults\"] = maxResults route = f\"rest/api/2/project/search\" return self.get(route=route,params=params) or {} # User", "params=params ) # Return the response to get the required data try: return", "- avatar type \"\"\" route = f\"rest/api/2/avatar/{avtype}/system\" return self.get(route=route) or {} def get_avatars(self,avtype,entityid):", "f\"rest/api/2/filter/{fId}\" return self.get(route=route) or {} def get_my_filters(self): route = \"rest/api/2/filter/my\" return self.get(route=route) or", "by owner type, where the owner types are issue type, project, or user.", "response.json() except: return None # Application roles def get_application_roles_all(self): \"\"\" Returns all application", "def get(self, route, params=None): \"\"\" Get the API Response \"\"\" print(f\"{self.base_url}{route}\") response =", "types are issue type, project, or user. :avtype: - avatar type \"\"\" route", "for the current user. 
\"\"\" route = \"rest/api/2/mypermissions\" return self.get(route=route) or {} def", "f\"rest/api/2/issue/{issueIdOrKey}/comments\" return self.get(route=route,params=params) or {} def get_comment(self,issueIdOrKey,cId): route = f\"rest/api/2/issue/{issueIdOrKey}/comment/{cId}\" return self.get(route=route) or", "params[\"startAt\"] = startat if(maxresults): params[\"maxResults\"] = maxresults route = \"rest/api/2/dashboard\" return self.get(route=route,params=params) or" ]
[ "msg): if self.cli: self.logger.warn(msg) else: wx.LogWarning(msg) log = None # type: Logger or", "= hasattr(self, \"show_toolbar_button\") self.show_toolbar_button = True icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir, 'icon.png')", "= logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self,", "self).__init__() self.name = \"Generate Gerber Package\" self.category = \"Read PCB\" self.pcbnew_icon_support = hasattr(self,", "config.save() config.load_from_ini() dlg = SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try: config.transfer_to_dialog(dlg.panel) if dlg.ShowModal()", "file_list = parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir = config.output_dest_dir else: output_file_dir = os.path.join(pcb_file_dir,", "'_', name) return name + '.zip' class KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self): super(KiZipPlugin, self).__init__()", "# type: (Parser, Config, Logger) -> None global log log = logger pcb_file_name", "pcbdata = parser.parse() file_list = parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir = config.output_dest_dir else:", "class KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self): super(KiZipPlugin, self).__init__() self.name = \"Generate Gerber Package\" self.category", "KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self): super(KiZipPlugin, self).__init__() self.name = \"Generate Gerber Package\" self.category =", "or None def process_substitutions(output_name_format, pcb_file_name, metadata): # type: (str, str, dict)->str name =", "name = name.replace('%D', now.strftime('%Y-%m-%d')) name = name.replace('%T', now.strftime('%H-%M-%S')) # sanitize the name to", 
"generating gerbers') return parser = Parser(pcb_file_name, config, logger, board) try: run_with_dialog(parser, config, logger)", "self.cli: self.logger.info(*args) def error(self, msg): if self.cli: self.logger.error(msg) else: wx.MessageBox(msg) def warn(self, msg):", "self.logger.error(msg) else: wx.MessageBox(msg) def warn(self, msg): if self.cli: self.logger.warn(msg) else: wx.LogWarning(msg) log =", "process_substitutions( config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name = os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip up all", "arcname=os.path.basename(filename)) def run_with_dialog(parser, config, logger): # type: (Parser, Config, Logger) -> None def", "import os import json import re import sys from datetime import datetime import", "from ..version import version from ..errors import ParsingException self.version = version board =", "systems name = name.replace('\\\\', '/') name = re.sub(r'[?%*:|\"<>]', '_', name) return name +", "def __init__(self): super(KiZipPlugin, self).__init__() self.name = \"Generate Gerber Package\" self.category = \"Read PCB\"", "Config, Logger) -> None global log log = logger pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir", "icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir, 'icon.png') self.description = \"Generate Gerber Package\" def", "else: wx.LogWarning(msg) log = None # type: Logger or None def process_substitutions(output_name_format, pcb_file_name,", "os.path.dirname(parser.file_name) pcbdata = parser.parse() file_list = parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir = config.output_dest_dir", "pcb_file_name: logger.error('Please save the board file before generating gerbers') return parser = Parser(pcb_file_name,", "ch.setLevel(logging.INFO) formatter = logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\") 
ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self, *args): if", "= name.replace('%r', metadata['revision']) name = name.replace('%d', metadata['date'].replace(':', '-')) now = datetime.now() name =", "__init__(self, cli=False): self.cli = cli self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO)", "# type: Logger or None def process_substitutions(output_name_format, pcb_file_name, metadata): # type: (str, str,", "= os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name) pcbdata = parser.parse() file_list = parser.plot() logger.info(file_list) if", "dict)->str name = output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name = name.replace('%p', metadata['title']) name = name.replace('%c', metadata['company'])", "\"Generate Gerber Package\" self.category = \"Read PCB\" self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\") self.show_toolbar_button =", "output_file_dir = os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name = process_substitutions( config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name = os.path.join(output_file_dir,", "\"w\", zipfile.ZIP_DEFLATED) as zf: for filename in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def run_with_dialog(parser, config,", "# type: (str, str, dict)->str name = output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name = name.replace('%p', metadata['title'])", "self.name = \"Generate Gerber Package\" self.category = \"Read PCB\" self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\")", "filename in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def run_with_dialog(parser, config, logger): # type: (Parser, Config,", "file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def 
run_with_dialog(parser, config, logger): # type: (Parser, Config, Logger) ->", "output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name = name.replace('%p', metadata['title']) name = name.replace('%c', metadata['company']) name = name.replace('%r',", "except ParsingException as e: logger.error(str(e)) def main(parser, config, logger): # type: (Parser, Config,", "from datetime import datetime import logging import wx import zipfile import shutil import", "main(parser, config, logger): # type: (Parser, Config, Logger) -> None global log log", "name + '.zip' class KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self): super(KiZipPlugin, self).__init__() self.name = \"Generate", "characters illegal in file systems name = name.replace('\\\\', '/') name = re.sub(r'[?%*:|\"<>]', '_',", "import zipfile import shutil import pcbnew from .config import Config from ..dialog import", "output_file_dir = config.output_dest_dir else: output_file_dir = os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name = process_substitutions( config.output_name_format, pcb_file_name,", "config.output_dest_dir else: output_file_dir = os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name = process_substitutions( config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name", "self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch)", "ParsingException as e: logger.error(str(e)) def main(parser, config, logger): # type: (Parser, Config, Logger)", "= pcbnew.GetBoard() pcb_file_name = board.GetFileName() config = Config(self.version, os.path.dirname(pcb_file_name)) logger = Logger() if", "zf: for filename in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def run_with_dialog(parser, config, logger): # type:", "def 
__init__(self, cli=False): self.cli = cli self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout)", "process_substitutions(output_name_format, pcb_file_name, metadata): # type: (str, str, dict)->str name = output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name", "save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() dlg = SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try: config.transfer_to_dialog(dlg.panel)", "formatter = logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self, *args): if self.cli:", "= Logger() if not pcb_file_name: logger.error('Please save the board file before generating gerbers')", "os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name) pcbdata = parser.parse() file_list = parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir):", "Logger or None def process_substitutions(output_name_format, pcb_file_name, metadata): # type: (str, str, dict)->str name", "logging import wx import zipfile import shutil import pcbnew from .config import Config", "board file before generating gerbers') return parser = Parser(pcb_file_name, config, logger, board) try:", "output_file_name = os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip up all files with zipfile.ZipFile(output_file_name, \"w\",", "= board.GetFileName() config = Config(self.version, os.path.dirname(pcb_file_name)) logger = Logger() if not pcb_file_name: logger.error('Please", "self.cli = cli self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter =", "(str, str, dict)->str name = output_name_format.replace('%f', 
os.path.splitext(pcb_file_name)[0]) name = name.replace('%p', metadata['title']) name =", "logger, board) try: run_with_dialog(parser, config, logger) except ParsingException as e: logger.error(str(e)) def main(parser,", "type: Logger or None def process_substitutions(output_name_format, pcb_file_name, metadata): # type: (str, str, dict)->str", "name to avoid characters illegal in file systems name = name.replace('\\\\', '/') name", "= Parser(pcb_file_name, config, logger, board) try: run_with_dialog(parser, config, logger) except ParsingException as e:", "config.output_dest_dir) output_file_name = process_substitutions( config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name = os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True)", "config, logger) except ParsingException as e: logger.error(str(e)) def main(parser, config, logger): # type:", "if not pcb_file_name: logger.error('Please save the board file before generating gerbers') return parser", "Package\" def defaults(self): pass def Run(self): from ..version import version from ..errors import", "*args): if self.cli: self.logger.info(*args) def error(self, msg): if self.cli: self.logger.error(msg) else: wx.MessageBox(msg) def", "version board = pcbnew.GetBoard() pcb_file_name = board.GetFileName() config = Config(self.version, os.path.dirname(pcb_file_name)) logger =", "os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip up all files with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as", "..errors import ParsingException self.version = version board = pcbnew.GetBoard() pcb_file_name = board.GetFileName() config", "None global log log = logger pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name) pcbdata", "board.GetFileName() config = Config(self.version, os.path.dirname(pcb_file_name)) logger = Logger() if not pcb_file_name: 
logger.error('Please save", "'icon.png') self.description = \"Generate Gerber Package\" def defaults(self): pass def Run(self): from ..version", "self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter( \"%(asctime)-15s %(levelname)s", "= version board = pcbnew.GetBoard() pcb_file_name = board.GetFileName() config = Config(self.version, os.path.dirname(pcb_file_name)) logger", "sys from datetime import datetime import logging import wx import zipfile import shutil", "%(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self, *args): if self.cli: self.logger.info(*args) def error(self, msg): if", "True icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir, 'icon.png') self.description = \"Generate Gerber Package\"", "name.replace('%d', metadata['date'].replace(':', '-')) now = datetime.now() name = name.replace('%D', now.strftime('%Y-%m-%d')) name = name.replace('%T',", "self.logger.addHandler(ch) def info(self, *args): if self.cli: self.logger.info(*args) def error(self, msg): if self.cli: self.logger.error(msg)", "os.path.join(icon_dir, 'icon.png') self.description = \"Generate Gerber Package\" def defaults(self): pass def Run(self): from", "\"%(asctime)-15s %(levelname)s %(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self, *args): if self.cli: self.logger.info(*args) def error(self,", "logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\") ch.setFormatter(formatter)", "re.sub(r'[?%*:|\"<>]', '_', name) return name + '.zip' class KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self): super(KiZipPlugin,", "= os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name = process_substitutions( 
config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name = os.path.join(output_file_dir, output_file_name)", "return name + '.zip' class KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self): super(KiZipPlugin, self).__init__() self.name =", "Gerber Package\" self.category = \"Read PCB\" self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\") self.show_toolbar_button = True", "msg): if self.cli: self.logger.error(msg) else: wx.MessageBox(msg) def warn(self, msg): if self.cli: self.logger.warn(msg) else:", "as e: logger.error(str(e)) def main(parser, config, logger): # type: (Parser, Config, Logger) ->", "from .parser import Parser class Logger(object): def __init__(self, cli=False): self.cli = cli self.logger", "str, dict)->str name = output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name = name.replace('%p', metadata['title']) name = name.replace('%c',", "def warn(self, msg): if self.cli: self.logger.warn(msg) else: wx.LogWarning(msg) log = None # type:", "= os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip up all files with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED)", "logger) except ParsingException as e: logger.error(str(e)) def main(parser, config, logger): # type: (Parser,", "..errors import ParsingException from .parser import Parser class Logger(object): def __init__(self, cli=False): self.cli", "..version import version from ..errors import ParsingException self.version = version board = pcbnew.GetBoard()", "name) return name + '.zip' class KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self): super(KiZipPlugin, self).__init__() self.name", "log = logger pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name) pcbdata = parser.parse() file_list", "= re.sub(r'[?%*:|\"<>]', '_', name) return name + '.zip' class KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self):", 
"warn(self, msg): if self.cli: self.logger.warn(msg) else: wx.LogWarning(msg) log = None # type: Logger", "import ParsingException self.version = version board = pcbnew.GetBoard() pcb_file_name = board.GetFileName() config =", "def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() dlg = SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try:", "object): def __init__(self): super(KiZipPlugin, self).__init__() self.name = \"Generate Gerber Package\" self.category = \"Read", ".config import Config from ..dialog import SettingsDialog from ..errors import ParsingException from .parser", "metadata['revision']) name = name.replace('%d', metadata['date'].replace(':', '-')) now = datetime.now() name = name.replace('%D', now.strftime('%Y-%m-%d'))", "Logger() if not pcb_file_name: logger.error('Please save the board file before generating gerbers') return", "%(levelname)s %(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self, *args): if self.cli: self.logger.info(*args) def error(self, msg):", "logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir = config.output_dest_dir else: output_file_dir = os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name =", "dlg = SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try: config.transfer_to_dialog(dlg.panel) if dlg.ShowModal() == wx.ID_OK:", "import logging import wx import zipfile import shutil import pcbnew from .config import", "with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as zf: for filename in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def", "before generating gerbers') return parser = Parser(pcb_file_name, config, logger, board) try: run_with_dialog(parser, config,", "try: run_with_dialog(parser, 
config, logger) except ParsingException as e: logger.error(str(e)) def main(parser, config, logger):", "config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() dlg = SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try: config.transfer_to_dialog(dlg.panel) if", "self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\") self.show_toolbar_button = True icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir,", "zipfile import shutil import pcbnew from .config import Config from ..dialog import SettingsDialog", "import pcbnew from .config import Config from ..dialog import SettingsDialog from ..errors import", "= True icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir, 'icon.png') self.description = \"Generate Gerber", "version=config.version ) try: config.transfer_to_dialog(dlg.panel) if dlg.ShowModal() == wx.ID_OK: config.set_from_dialog(dlg.panel) main(parser, config, logger) finally:", "= os.path.join(icon_dir, 'icon.png') self.description = \"Generate Gerber Package\" def defaults(self): pass def Run(self):", "metadata['date'].replace(':', '-')) now = datetime.now() name = name.replace('%D', now.strftime('%Y-%m-%d')) name = name.replace('%T', now.strftime('%H-%M-%S'))", "= datetime.now() name = name.replace('%D', now.strftime('%Y-%m-%d')) name = name.replace('%T', now.strftime('%H-%M-%S')) # sanitize the", "info(self, *args): if self.cli: self.logger.info(*args) def error(self, msg): if self.cli: self.logger.error(msg) else: wx.MessageBox(msg)", "ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch) def", "None def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() dlg = SettingsDialog( 
config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version )", "= SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try: config.transfer_to_dialog(dlg.panel) if dlg.ShowModal() == wx.ID_OK: config.set_from_dialog(dlg.panel)", "super(KiZipPlugin, self).__init__() self.name = \"Generate Gerber Package\" self.category = \"Read PCB\" self.pcbnew_icon_support =", "datetime import logging import wx import zipfile import shutil import pcbnew from .config", "avoid characters illegal in file systems name = name.replace('\\\\', '/') name = re.sub(r'[?%*:|\"<>]',", "= None # type: Logger or None def process_substitutions(output_name_format, pcb_file_name, metadata): # type:", "def main(parser, config, logger): # type: (Parser, Config, Logger) -> None global log", "if self.cli: self.logger.warn(msg) else: wx.LogWarning(msg) log = None # type: Logger or None", "files with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as zf: for filename in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename))", "import version from ..errors import ParsingException self.version = version board = pcbnew.GetBoard() pcb_file_name", "= Config(self.version, os.path.dirname(pcb_file_name)) logger = Logger() if not pcb_file_name: logger.error('Please save the board", "import wx import zipfile import shutil import pcbnew from .config import Config from", "run_with_dialog(parser, config, logger): # type: (Parser, Config, Logger) -> None def save_config(dialog_panel): config.set_from_dialog(dialog_panel)", "'-')) now = datetime.now() name = name.replace('%D', now.strftime('%Y-%m-%d')) name = name.replace('%T', now.strftime('%H-%M-%S')) #", "hasattr(self, \"show_toolbar_button\") self.show_toolbar_button = True icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir, 'icon.png') 
self.description", "import Config from ..dialog import SettingsDialog from ..errors import ParsingException from .parser import", "exist_ok=True) #zip up all files with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as zf: for filename", ") try: config.transfer_to_dialog(dlg.panel) if dlg.ShowModal() == wx.ID_OK: config.set_from_dialog(dlg.panel) main(parser, config, logger) finally: dlg.Destroy()", "def defaults(self): pass def Run(self): from ..version import version from ..errors import ParsingException", "os.path.splitext(pcb_file_name)[0]) name = name.replace('%p', metadata['title']) name = name.replace('%c', metadata['company']) name = name.replace('%r', metadata['revision'])", "= name.replace('%D', now.strftime('%Y-%m-%d')) name = name.replace('%T', now.strftime('%H-%M-%S')) # sanitize the name to avoid", "board = pcbnew.GetBoard() pcb_file_name = board.GetFileName() config = Config(self.version, os.path.dirname(pcb_file_name)) logger = Logger()", "= logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self, *args): if self.cli: self.logger.info(*args)", "name.replace('%r', metadata['revision']) name = name.replace('%d', metadata['date'].replace(':', '-')) now = datetime.now() name = name.replace('%D',", "name = name.replace('%d', metadata['date'].replace(':', '-')) now = datetime.now() name = name.replace('%D', now.strftime('%Y-%m-%d')) name", "..dialog import SettingsDialog from ..errors import ParsingException from .parser import Parser class Logger(object):", "cli=False): self.cli = cli self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter", "parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir = config.output_dest_dir else: output_file_dir = os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name", "if self.cli: 
self.logger.info(*args) def error(self, msg): if self.cli: self.logger.error(msg) else: wx.MessageBox(msg) def warn(self,", "up all files with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as zf: for filename in file_list:", "pcb_file_name, pcbdata['metadata']) output_file_name = os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip up all files with", "self.show_toolbar_button = True icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir, 'icon.png') self.description = \"Generate", "name = name.replace('\\\\', '/') name = re.sub(r'[?%*:|\"<>]', '_', name) return name + '.zip'", "zipfile.ZIP_DEFLATED) as zf: for filename in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def run_with_dialog(parser, config, logger):", "def info(self, *args): if self.cli: self.logger.info(*args) def error(self, msg): if self.cli: self.logger.error(msg) else:", "# sanitize the name to avoid characters illegal in file systems name =", "now.strftime('%H-%M-%S')) # sanitize the name to avoid characters illegal in file systems name", "def error(self, msg): if self.cli: self.logger.error(msg) else: wx.MessageBox(msg) def warn(self, msg): if self.cli:", "pcb_file_dir = os.path.dirname(parser.file_name) pcbdata = parser.parse() file_list = parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir", "import shutil import pcbnew from .config import Config from ..dialog import SettingsDialog from", "from ..errors import ParsingException self.version = version board = pcbnew.GetBoard() pcb_file_name = board.GetFileName()", "wx import zipfile import shutil import pcbnew from .config import Config from ..dialog", "pcb_file_name, metadata): # type: (str, str, dict)->str name = output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name =", "the name to avoid characters illegal in file systems name = 
name.replace('\\\\', '/')", "= output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name = name.replace('%p', metadata['title']) name = name.replace('%c', metadata['company']) name =", "config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try: config.transfer_to_dialog(dlg.panel) if dlg.ShowModal() == wx.ID_OK: config.set_from_dialog(dlg.panel) main(parser, config,", "error(self, msg): if self.cli: self.logger.error(msg) else: wx.MessageBox(msg) def warn(self, msg): if self.cli: self.logger.warn(msg)", "datetime import datetime import logging import wx import zipfile import shutil import pcbnew", "logger.error('Please save the board file before generating gerbers') return parser = Parser(pcb_file_name, config,", "else: output_file_dir = os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name = process_substitutions( config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name =", "-> None def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() dlg = SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version", "import sys from datetime import datetime import logging import wx import zipfile import", "self.category = \"Read PCB\" self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\") self.show_toolbar_button = True icon_dir =", "'/') name = re.sub(r'[?%*:|\"<>]', '_', name) return name + '.zip' class KiZipPlugin(pcbnew.ActionPlugin, object):", "pcbdata['metadata']) output_file_name = os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip up all files with zipfile.ZipFile(output_file_name,", "logger): # type: (Parser, Config, Logger) -> None global log log = logger", "logger): # type: (Parser, Config, Logger) -> None def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() 
config.load_from_ini()", "self.version = version board = pcbnew.GetBoard() pcb_file_name = board.GetFileName() config = Config(self.version, os.path.dirname(pcb_file_name))", "None def process_substitutions(output_name_format, pcb_file_name, metadata): # type: (str, str, dict)->str name = output_name_format.replace('%f',", "datetime.now() name = name.replace('%D', now.strftime('%Y-%m-%d')) name = name.replace('%T', now.strftime('%H-%M-%S')) # sanitize the name", "file before generating gerbers') return parser = Parser(pcb_file_name, config, logger, board) try: run_with_dialog(parser,", "= config.output_dest_dir else: output_file_dir = os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name = process_substitutions( config.output_name_format, pcb_file_name, pcbdata['metadata'])", "pass def Run(self): from ..version import version from ..errors import ParsingException self.version =", "pcbnew from .config import Config from ..dialog import SettingsDialog from ..errors import ParsingException", "zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as zf: for filename in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def run_with_dialog(parser,", "name.replace('%p', metadata['title']) name = name.replace('%c', metadata['company']) name = name.replace('%r', metadata['revision']) name = name.replace('%d',", "import datetime import logging import wx import zipfile import shutil import pcbnew from", "name = name.replace('%p', metadata['title']) name = name.replace('%c', metadata['company']) name = name.replace('%r', metadata['revision']) name", "def run_with_dialog(parser, config, logger): # type: (Parser, Config, Logger) -> None def save_config(dialog_panel):", "defaults(self): pass def Run(self): from ..version import version from ..errors import ParsingException self.version", "name = name.replace('%c', metadata['company']) name = name.replace('%r', metadata['revision']) name = name.replace('%d', 
metadata['date'].replace(':', '-'))", "json import re import sys from datetime import datetime import logging import wx", "name.replace('%c', metadata['company']) name = name.replace('%r', metadata['revision']) name = name.replace('%d', metadata['date'].replace(':', '-')) now =", "Parser class Logger(object): def __init__(self, cli=False): self.cli = cli self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO)", "= logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\")", "file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try: config.transfer_to_dialog(dlg.panel) if dlg.ShowModal() == wx.ID_OK: config.set_from_dialog(dlg.panel) main(parser, config, logger)", "name = name.replace('%r', metadata['revision']) name = name.replace('%d', metadata['date'].replace(':', '-')) now = datetime.now() name", "ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self, *args): if self.cli: self.logger.info(*args) def error(self, msg): if self.cli:", "self.icon_file_name = os.path.join(icon_dir, 'icon.png') self.description = \"Generate Gerber Package\" def defaults(self): pass def", "else: wx.MessageBox(msg) def warn(self, msg): if self.cli: self.logger.warn(msg) else: wx.LogWarning(msg) log = None", "= cli self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter(", "in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def run_with_dialog(parser, config, logger): # type: (Parser, Config, Logger)", "os.path.dirname(pcb_file_name)) logger = Logger() if not pcb_file_name: logger.error('Please save the board file before", "self.cli: self.logger.warn(msg) else: wx.LogWarning(msg) log = None # type: Logger or None def", "type: (str, 
str, dict)->str name = output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name = name.replace('%p', metadata['title']) name", "Gerber Package\" def defaults(self): pass def Run(self): from ..version import version from ..errors", "= name.replace('\\\\', '/') name = re.sub(r'[?%*:|\"<>]', '_', name) return name + '.zip' class", "logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self, *args): if self.cli: self.logger.info(*args) def", "= \"Generate Gerber Package\" def defaults(self): pass def Run(self): from ..version import version", "self.logger.warn(msg) else: wx.LogWarning(msg) log = None # type: Logger or None def process_substitutions(output_name_format,", "= \"Read PCB\" self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\") self.show_toolbar_button = True icon_dir = os.path.dirname(os.path.dirname(__file__))", "name = name.replace('%T', now.strftime('%H-%M-%S')) # sanitize the name to avoid characters illegal in", "if os.path.isabs(config.output_dest_dir): output_file_dir = config.output_dest_dir else: output_file_dir = os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name = process_substitutions(", "logger pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name) pcbdata = parser.parse() file_list = parser.plot()", "(Parser, Config, Logger) -> None global log log = logger pcb_file_name = os.path.basename(parser.file_name)", "pcbnew.GetBoard() pcb_file_name = board.GetFileName() config = Config(self.version, os.path.dirname(pcb_file_name)) logger = Logger() if not", "config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name = os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip up all files", "os.path.isabs(config.output_dest_dir): output_file_dir = config.output_dest_dir else: output_file_dir = os.path.join(pcb_file_dir, 
config.output_dest_dir) output_file_name = process_substitutions( config.output_name_format,", "sanitize the name to avoid characters illegal in file systems name = name.replace('\\\\',", "'.zip' class KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self): super(KiZipPlugin, self).__init__() self.name = \"Generate Gerber Package\"", "metadata['company']) name = name.replace('%r', metadata['revision']) name = name.replace('%d', metadata['date'].replace(':', '-')) now = datetime.now()", "logger = Logger() if not pcb_file_name: logger.error('Please save the board file before generating", "name.replace('%T', now.strftime('%H-%M-%S')) # sanitize the name to avoid characters illegal in file systems", "e: logger.error(str(e)) def main(parser, config, logger): # type: (Parser, Config, Logger) -> None", "import json import re import sys from datetime import datetime import logging import", "name = output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name = name.replace('%p', metadata['title']) name = name.replace('%c', metadata['company']) name", "illegal in file systems name = name.replace('\\\\', '/') name = re.sub(r'[?%*:|\"<>]', '_', name)", "Config from ..dialog import SettingsDialog from ..errors import ParsingException from .parser import Parser", "config, logger): # type: (Parser, Config, Logger) -> None global log log =", "the board file before generating gerbers') return parser = Parser(pcb_file_name, config, logger, board)", "metadata['title']) name = name.replace('%c', metadata['company']) name = name.replace('%r', metadata['revision']) name = name.replace('%d', metadata['date'].replace(':',", "(Parser, Config, Logger) -> None def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() dlg = SettingsDialog(", "self.cli: self.logger.error(msg) else: wx.MessageBox(msg) def warn(self, msg): if self.cli: self.logger.warn(msg) else: wx.LogWarning(msg) log", "\"show_toolbar_button\") 
self.show_toolbar_button = True icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir, 'icon.png') self.description =", "version from ..errors import ParsingException self.version = version board = pcbnew.GetBoard() pcb_file_name =", "parser = Parser(pcb_file_name, config, logger, board) try: run_with_dialog(parser, config, logger) except ParsingException as", "wx.LogWarning(msg) log = None # type: Logger or None def process_substitutions(output_name_format, pcb_file_name, metadata):", "return parser = Parser(pcb_file_name, config, logger, board) try: run_with_dialog(parser, config, logger) except ParsingException", "type: (Parser, Config, Logger) -> None def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() dlg =", "pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name) pcbdata = parser.parse() file_list = parser.plot() logger.info(file_list)", "re import sys from datetime import datetime import logging import wx import zipfile", "from ..dialog import SettingsDialog from ..errors import ParsingException from .parser import Parser class", "\"Read PCB\" self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\") self.show_toolbar_button = True icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name", "#zip up all files with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as zf: for filename in", "# type: (Parser, Config, Logger) -> None def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() dlg", "os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir, 'icon.png') self.description = \"Generate Gerber Package\" def defaults(self): pass", "def process_substitutions(output_name_format, pcb_file_name, metadata): # type: (str, str, dict)->str name = output_name_format.replace('%f', 
os.path.splitext(pcb_file_name)[0])", "class Logger(object): def __init__(self, cli=False): self.cli = cli self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch", "Run(self): from ..version import version from ..errors import ParsingException self.version = version board", "import SettingsDialog from ..errors import ParsingException from .parser import Parser class Logger(object): def", "import Parser class Logger(object): def __init__(self, cli=False): self.cli = cli self.logger = logging.getLogger('KiZip')", "= name.replace('%d', metadata['date'].replace(':', '-')) now = datetime.now() name = name.replace('%D', now.strftime('%Y-%m-%d')) name =", "output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip up all files with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as zf:", "file systems name = name.replace('\\\\', '/') name = re.sub(r'[?%*:|\"<>]', '_', name) return name", "config, logger, board) try: run_with_dialog(parser, config, logger) except ParsingException as e: logger.error(str(e)) def", "cli self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch = logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter( \"%(asctime)-15s", "__init__(self): super(KiZipPlugin, self).__init__() self.name = \"Generate Gerber Package\" self.category = \"Read PCB\" self.pcbnew_icon_support", "import re import sys from datetime import datetime import logging import wx import", "= os.path.dirname(os.path.dirname(__file__)) self.icon_file_name = os.path.join(icon_dir, 'icon.png') self.description = \"Generate Gerber Package\" def defaults(self):", "= parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir = config.output_dest_dir else: output_file_dir = os.path.join(pcb_file_dir, config.output_dest_dir)", "Config, Logger) -> None def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() 
dlg = SettingsDialog( config_save_func=save_config,", "from ..errors import ParsingException from .parser import Parser class Logger(object): def __init__(self, cli=False):", "+ '.zip' class KiZipPlugin(pcbnew.ActionPlugin, object): def __init__(self): super(KiZipPlugin, self).__init__() self.name = \"Generate Gerber", "os import json import re import sys from datetime import datetime import logging", "Package\" self.category = \"Read PCB\" self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\") self.show_toolbar_button = True icon_dir", "all files with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as zf: for filename in file_list: zf.write(filename=os.path.abspath(filename),", "config = Config(self.version, os.path.dirname(pcb_file_name)) logger = Logger() if not pcb_file_name: logger.error('Please save the", "in file systems name = name.replace('\\\\', '/') name = re.sub(r'[?%*:|\"<>]', '_', name) return", "output_file_name = process_substitutions( config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name = os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip", "os.path.join(pcb_file_dir, config.output_dest_dir) output_file_name = process_substitutions( config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name = os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir,", "Config(self.version, os.path.dirname(pcb_file_name)) logger = Logger() if not pcb_file_name: logger.error('Please save the board file", "not pcb_file_name: logger.error('Please save the board file before generating gerbers') return parser =", "save the board file before generating gerbers') return parser = Parser(pcb_file_name, config, logger,", "board) try: run_with_dialog(parser, config, logger) except ParsingException as e: logger.error(str(e)) def main(parser, config,", "wx.MessageBox(msg) def warn(self, msg): if self.cli: self.logger.warn(msg) else: 
wx.LogWarning(msg) log = None #", "SettingsDialog from ..errors import ParsingException from .parser import Parser class Logger(object): def __init__(self,", "now.strftime('%Y-%m-%d')) name = name.replace('%T', now.strftime('%H-%M-%S')) # sanitize the name to avoid characters illegal", "now = datetime.now() name = name.replace('%D', now.strftime('%Y-%m-%d')) name = name.replace('%T', now.strftime('%H-%M-%S')) # sanitize", "import ParsingException from .parser import Parser class Logger(object): def __init__(self, cli=False): self.cli =", "= parser.parse() file_list = parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir = config.output_dest_dir else: output_file_dir", "Logger) -> None global log log = logger pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir =", "metadata): # type: (str, str, dict)->str name = output_name_format.replace('%f', os.path.splitext(pcb_file_name)[0]) name = name.replace('%p',", "\"Generate Gerber Package\" def defaults(self): pass def Run(self): from ..version import version from", "logger.error(str(e)) def main(parser, config, logger): # type: (Parser, Config, Logger) -> None global", "config, logger): # type: (Parser, Config, Logger) -> None def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save()", "Parser(pcb_file_name, config, logger, board) try: run_with_dialog(parser, config, logger) except ParsingException as e: logger.error(str(e))", "SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try: config.transfer_to_dialog(dlg.panel) if dlg.ShowModal() == wx.ID_OK: config.set_from_dialog(dlg.panel) main(parser,", "Logger(object): def __init__(self, cli=False): self.cli = cli self.logger = logging.getLogger('KiZip') self.logger.setLevel(logging.INFO) ch =", "parser.parse() file_list = parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir = 
config.output_dest_dir else: output_file_dir =", "zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def run_with_dialog(parser, config, logger): # type: (Parser, Config, Logger) -> None", "None # type: Logger or None def process_substitutions(output_name_format, pcb_file_name, metadata): # type: (str,", "-> None global log log = logger pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name)", "gerbers') return parser = Parser(pcb_file_name, config, logger, board) try: run_with_dialog(parser, config, logger) except", "config.load_from_ini() dlg = SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT, version=config.version ) try: config.transfer_to_dialog(dlg.panel) if dlg.ShowModal() ==", "ParsingException self.version = version board = pcbnew.GetBoard() pcb_file_name = board.GetFileName() config = Config(self.version,", "Logger) -> None def save_config(dialog_panel): config.set_from_dialog(dialog_panel) config.save() config.load_from_ini() dlg = SettingsDialog( config_save_func=save_config, file_name_format_hint=config.FILE_NAME_FORMAT_HINT,", "name.replace('\\\\', '/') name = re.sub(r'[?%*:|\"<>]', '_', name) return name + '.zip' class KiZipPlugin(pcbnew.ActionPlugin,", "= name.replace('%c', metadata['company']) name = name.replace('%r', metadata['revision']) name = name.replace('%d', metadata['date'].replace(':', '-')) now", "def Run(self): from ..version import version from ..errors import ParsingException self.version = version", "log log = logger pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name) pcbdata = parser.parse()", "pcb_file_name = board.GetFileName() config = Config(self.version, os.path.dirname(pcb_file_name)) logger = Logger() if not pcb_file_name:", "os.makedirs(output_file_dir, exist_ok=True) #zip up all files with zipfile.ZipFile(output_file_name, \"w\", zipfile.ZIP_DEFLATED) as 
zf: for", "logging.StreamHandler(sys.stdout) ch.setLevel(logging.INFO) formatter = logging.Formatter( \"%(asctime)-15s %(levelname)s %(message)s\") ch.setFormatter(formatter) self.logger.addHandler(ch) def info(self, *args):", "ParsingException from .parser import Parser class Logger(object): def __init__(self, cli=False): self.cli = cli", "= os.path.dirname(parser.file_name) pcbdata = parser.parse() file_list = parser.plot() logger.info(file_list) if os.path.isabs(config.output_dest_dir): output_file_dir =", "PCB\" self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\") self.show_toolbar_button = True icon_dir = os.path.dirname(os.path.dirname(__file__)) self.icon_file_name =", "self.description = \"Generate Gerber Package\" def defaults(self): pass def Run(self): from ..version import", "from .config import Config from ..dialog import SettingsDialog from ..errors import ParsingException from", "shutil import pcbnew from .config import Config from ..dialog import SettingsDialog from ..errors", "= name.replace('%p', metadata['title']) name = name.replace('%c', metadata['company']) name = name.replace('%r', metadata['revision']) name =", "as zf: for filename in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def run_with_dialog(parser, config, logger): #", "for filename in file_list: zf.write(filename=os.path.abspath(filename), arcname=os.path.basename(filename)) def run_with_dialog(parser, config, logger): # type: (Parser,", "if self.cli: self.logger.error(msg) else: wx.MessageBox(msg) def warn(self, msg): if self.cli: self.logger.warn(msg) else: wx.LogWarning(msg)", "global log log = logger pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name) pcbdata =", "= logger pcb_file_name = os.path.basename(parser.file_name) pcb_file_dir = os.path.dirname(parser.file_name) pcbdata = parser.parse() file_list =", "self.logger.info(*args) def error(self, msg): if self.cli: 
self.logger.error(msg) else: wx.MessageBox(msg) def warn(self, msg): if", ".parser import Parser class Logger(object): def __init__(self, cli=False): self.cli = cli self.logger =", "= process_substitutions( config.output_name_format, pcb_file_name, pcbdata['metadata']) output_file_name = os.path.join(output_file_dir, output_file_name) os.makedirs(output_file_dir, exist_ok=True) #zip up", "name.replace('%D', now.strftime('%Y-%m-%d')) name = name.replace('%T', now.strftime('%H-%M-%S')) # sanitize the name to avoid characters", "= \"Generate Gerber Package\" self.category = \"Read PCB\" self.pcbnew_icon_support = hasattr(self, \"show_toolbar_button\") self.show_toolbar_button", "= name.replace('%T', now.strftime('%H-%M-%S')) # sanitize the name to avoid characters illegal in file", "log = None # type: Logger or None def process_substitutions(output_name_format, pcb_file_name, metadata): #", "run_with_dialog(parser, config, logger) except ParsingException as e: logger.error(str(e)) def main(parser, config, logger): #", "to avoid characters illegal in file systems name = name.replace('\\\\', '/') name =", "type: (Parser, Config, Logger) -> None global log log = logger pcb_file_name =", "name = re.sub(r'[?%*:|\"<>]', '_', name) return name + '.zip' class KiZipPlugin(pcbnew.ActionPlugin, object): def" ]
[ "import GuidedBraTSDataset3D # from loss.FALoss3D import FALoss3D import cv2 from loss.TaskFusionLoss import TaskFusionLoss", "of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model') parser.add_argument('-v', help=\"increase output verbosity\", action=\"store_true\") args", "lossfunc') parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model') parser.add_argument('-v',", "= pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance) loss_seg =", "from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D # from loss.FALoss3D import FALoss3D import cv2 from loss.TaskFusionLoss", "1]) dice = 2 * pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice", "= pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr =", "help='w_tf of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model') parser.add_argument('-v', help=\"increase output verbosity\", action=\"store_true\")", "hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. 
Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished.", "loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255", "optimizer.step() loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End", "= pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1", "for c in 
range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data", "3D Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs', type=int, default=1,", "= pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance) loss_seg = lossfunc_seg(outputs_seg, labels_seg) loss_sr = lossfunc_sr(outputs_sr, labels_sr)", "i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask", "pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1]) dice =", "the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model') parser.add_argument('-v', help=\"increase output verbosity\", action=\"store_true\") args =", "model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b", "= output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1]) dice = 2", "lossfunc_seg(outputs_seg, labels_seg) loss_sr = lossfunc_sr(outputs_sr, labels_sr) loss_pf = 
lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step()", "= model(inputs,guidance) loss_seg = lossfunc_seg(outputs_seg, labels_seg) loss_sr = lossfunc_sr(outputs_sr, labels_sr) loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr)", "optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1)", "output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1])", "pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in", "/ (pr_sum + gt_sum) dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. 
Test", "lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(), lr=lr) # # scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer,", "labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance) loss_seg = lossfunc_seg(outputs_seg, labels_seg)", "w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size has", "from loss.FALoss3D import FALoss3D import cv2 from loss.TaskFusionLoss import TaskFusionLoss from loss.DiceLoss import", "outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum()", "if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) 
lossfunc_sr=pt.nn.MSELoss()", "default=0.5, help='w_tf of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model') parser.add_argument('-v', help=\"increase output verbosity\",", "(inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in", "= np.sum(output_list[label_list == 1]) dice = 2 * pr_gt_sum / (pr_sum + gt_sum)", "than GPU#. Set to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu'))", "pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size has to be larger than GPU#. Set", "type=float, default=0.0001, help='learning rate') parser.add_argument('-w_sr', type=float, default=0.5, help='w_sr of the lossfunc') parser.add_argument('-w_tf', type=float,", "def TestModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5", "GPU#. 
Set to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train')", "optimizer = pt.optim.Adam(model.parameters(), lr=lr) # # scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel():", "args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n') print('===VAL===>')", "has to be larger than GPU#. 
Set to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not", "pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255", "overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with", "epochs') parser.add_argument('-lr', type=float, default=0.0001, help='learning rate') parser.add_argument('-w_sr', type=float, default=0.5, help='w_sr of the lossfunc')", "lr=lr) # # scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval() dice_sum=0 hd_sum=0", "dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Test Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished.", "print(\"Finished. Test Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. Test Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. 
Test Avg", "import tqdm # from tensorboardX import SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free", "epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count()", "parser = argparse.ArgumentParser(description='Patch-free 3D Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output')", "import BinaryDiceLoss from config import config import argparse from tqdm import tqdm #", "to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test')", "type=int, default=1, help='input batch size') parser.add_argument('-epoch', type=int, default=100, help='number of epochs') parser.add_argument('-lr', type=float,", "larger than GPU#. 
Set to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location =", "inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0", "model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size", "a pretrained model') parser.add_argument('-v', help=\"increase output verbosity\", action=\"store_true\") args = parser.parse_args() dataset_path=args.dataset_path lr=args.lr", "= label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1]) dice = 2 * pr_gt_sum /", "(pr_sum + gt_sum) dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. 
Test Total", "config import argparse from tqdm import tqdm # from tensorboardX import SummaryWriter crop_size=config.crop_size", "model(inputs,guidance) loss_seg = lossfunc_seg(outputs_seg, labels_seg) loss_sr = lossfunc_sr(outputs_sr, labels_sr) loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr)", "if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size has to be larger than GPU#.", "= lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255", "output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum() gt_sum = label_list.sum()", "label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a", "Avg hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def TestModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 
weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for", "c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D", "pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5", "in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2]))", "dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D # from loss.FALoss3D import FALoss3D import cv2 from loss.TaskFusionLoss import", "lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count():", "pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c", "2 * pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2]))", "parser.add_argument('-lr', type=float, default=0.0001, help='learning rate') 
parser.add_argument('-w_sr', type=float, default=0.5, help='w_sr of the lossfunc') parser.add_argument('-w_tf',", "dice_sum += dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff", "batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch", "config import config import argparse from tqdm import tqdm # from tensorboardX import", "+ gt_sum) dice_sum += dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0))", "\",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100) for x in range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,':", "loss.FALoss3D import FALoss3D import cv2 from loss.TaskFusionLoss import TaskFusionLoss from loss.DiceLoss import BinaryDiceLoss", "crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free 3D Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset')", "best_dice=dice print('New best dice! 
Model saved to',model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') pt.save(model.state_dict(), model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') print('===TEST===>') TestModel() print('\\nBest Dice:',best_dice)", "not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss()", "model.PFSeg import PFSeg3D from medpy.metric.binary import jc,hd95 from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D # from", "output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum() gt_sum", "PFSeg3D from medpy.metric.binary import jc,hd95 from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D # from loss.FALoss3D import", "from model.PFSeg import PFSeg3D from medpy.metric.binary import jc,hd95 from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D #", "c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in 
enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D", "import SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free 3D Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path", "to output') parser.add_argument('-bs', type=int, default=1, help='input batch size') parser.add_argument('-epoch', type=int, default=100, help='number of", "for x in range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data", "# overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for", "tqdm import tqdm # from tensorboardX import SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser =", "outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum", "args = parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda()", "default=100, help='number of epochs') parser.add_argument('-lr', type=float, default=0.0001, 
help='learning rate') parser.add_argument('-w_sr', type=float, default=0.5, help='w_sr", "gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2):", "dice_sum/len(val_dataset) def TestModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): #", "return dice_sum/len(val_dataset) def TestModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2):", "{:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True)", "print(\"Finished. 
Avg hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def TestModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2]))", "dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished.", "= pt.optim.Adam(model.parameters(), lr=lr) # # scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval()", "model=pt.nn.DataParallel(model) if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True)", "size has to be larger than GPU#. 
Set to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if", "= lossfunc_sr(outputs_sr, labels_sr) loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item() if args.v:", "jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for", "for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in", "labels_seg) loss_sr = lossfunc_sr(outputs_sr, labels_sr) loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item()", "pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance) loss_seg = lossfunc_seg(outputs_seg,", "be larger than GPU#. 
Set to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location", "TaskFusionLoss from loss.DiceLoss import BinaryDiceLoss from config import config import argparse from tqdm", "parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model') parser.add_argument('-v', help=\"increase", "import FALoss3D import cv2 from loss.TaskFusionLoss import TaskFusionLoss from loss.DiceLoss import BinaryDiceLoss from", "loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of", "dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. Avg hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def", "weight_map=1./weight_map for i,data in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance =", "hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Test Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. 
Test Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished.", "cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice) if dice>best_dice: best_dice=dice print('New best dice!", "from tqdm import tqdm # from tensorboardX import SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser", "import TaskFusionLoss from loss.DiceLoss import BinaryDiceLoss from config import config import argparse from", "iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg", "cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. 
Avg Jaccard:", "b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_", "output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list ==", "= lossfunc_seg(outputs_seg, labels_seg) loss_sr = lossfunc_sr(outputs_sr, labels_sr) loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward()", "lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(), lr=lr) # # scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20)", "print('Batch size has to be larger than GPU#. 
Set to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model)", "of the lossfunc') parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained", "final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list ==", "pr_gt_sum = np.sum(output_list[label_list == 1]) dice = 2 * pr_gt_sum / (pr_sum +", "outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum = output_list.sum() gt_sum", "label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1]) dice = 2 * pr_gt_sum / (pr_sum", "final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. 
Total dice: \",dice_sum/len(val_dataset),'\\n')", "ValModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for", "= model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum", "lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255", "for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in", "valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) 
val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(), lr=lr)", "final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished.", "parser.add_argument('-bs', type=int, default=1, help='input batch size') parser.add_argument('-epoch', type=int, default=100, help='number of epochs') parser.add_argument('-lr',", "print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance =", "import torch as pt import numpy as np from model.PFSeg import PFSeg3D from", "hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. 
Avg hausdorff:", "in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ =", "overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data", "range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1)", "pt import numpy as np from model.PFSeg import PFSeg3D from medpy.metric.binary import jc,hd95", "a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2):", "Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. Test Avg hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100)", "print('===VAL===>') dice=ValModel() scheduler.step(dice) if dice>best_dice: best_dice=dice print('New best dice! Model saved to',model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') pt.save(model.state_dict(),", "print(\"Finished. Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. 
Avg hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def TestModel(): model.eval()", "print('==>End of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice) if dice>best_dice: best_dice=dice print('New best dice! Model", "Test Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. Test Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. Test Avg hausdorff:", "help='number of epochs') parser.add_argument('-lr', type=float, default=0.0001, help='learning rate') parser.add_argument('-w_sr', type=float, default=0.5, help='w_sr of", "default=1, help='input batch size') parser.add_argument('-epoch', type=int, default=100, help='number of epochs') parser.add_argument('-lr', type=float, default=0.0001,", "help=\"increase output verbosity\", action=\"store_true\") args = parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr", "numpy as np from model.PFSeg import PFSeg3D from medpy.metric.binary import jc,hd95 from dataset.GuidedBraTSDataset3D", "Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs', type=int, default=1, help='input batch size')", "gt_sum) dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Test Total dice: \",dice_sum/len(test_dataset),'\\n')", "print(\"Finished. 
Test Avg hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100) for x in", "from tensorboardX import SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free 3D Medical Image", "model') parser.add_argument('-v', help=\"increase output verbosity\", action=\"store_true\") args = parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs", "if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Total", "tensorboardX import SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free 3D Medical Image Segmentation.')", "for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad():", "pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 
output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2]))", "argparse.ArgumentParser(description='Patch-free 3D Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs', type=int,", "parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model') parser.add_argument('-v', help=\"increase output verbosity\", action=\"store_true\") args = parser.parse_args() dataset_path=args.dataset_path", "for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data", "FALoss3D import cv2 from loss.TaskFusionLoss import TaskFusionLoss from loss.DiceLoss import BinaryDiceLoss from config", "'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer =", "x in range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad()", 
"range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D =", "batch_size=pt.cuda.device_count() print('Batch size has to be larger than GPU#. Set to {:d} instead.'.format(batch_size))", "parser.add_argument('-v', help=\"increase output verbosity\", action=\"store_true\") args = parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to", "type=float, default=0.5, help='w_tf of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model') parser.add_argument('-v', help=\"increase output", "i,data in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask", "= pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): #", "= model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum = output_list.sum() gt_sum =", "= 2 * pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) 
jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0))", "loss.TaskFusionLoss import TaskFusionLoss from loss.DiceLoss import BinaryDiceLoss from config import config import argparse", "== 1]) dice = 2 * pr_gt_sum / (pr_sum + gt_sum) dice_sum +=", "jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Test Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. Test Avg Jaccard: \",jc_sum/len(test_dataset))", "with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum =", "parser.add_argument('-epoch', type=int, default=100, help='number of epochs') parser.add_argument('-lr', type=float, default=0.0001, help='learning rate') parser.add_argument('-w_sr', type=float,", "Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. Test Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. 
Test Avg hausdorff: \",hd_sum/len(test_dataset))", "* pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255", "help='input batch size') parser.add_argument('-epoch', type=int, default=100, help='number of epochs') parser.add_argument('-lr', type=float, default=0.0001, help='learning", "pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size has to be", "return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100) for x in range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n')", "final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice) if", "tqdm # from tensorboardX import SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free 3D", "in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) 
output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy())", "print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. Avg Jaccard: \",jc_sum/len(val_dataset))", "model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1)", "Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. Avg hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def TestModel(): model.eval() dice_sum=0", "model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum = output_list.sum() gt_sum = label_list.sum()", "np from model.PFSeg import PFSeg3D from medpy.metric.binary import jc,hd95 from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D", "in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D =", "c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = 
pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D", "= 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer", "Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs', type=int, default=1, help='input", "args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. 
Total dice:", "print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size has to be larger", "in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1)", "BinaryDiceLoss from config import config import argparse from tqdm import tqdm # from", "scheduler.step(dice) if dice>best_dice: best_dice=dice print('New best dice! Model saved to',model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') pt.save(model.state_dict(), model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') print('===TEST===>')", "default=0.5, help='w_sr of the lossfunc') parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load", "loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255", "enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = 
pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1)", "weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c", "iterator=tqdm(train_dataset, ncols=100) for x in range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in", "size=crop_size[2] img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free 3D Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path", "img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free 3D Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to", "import numpy as np from model.PFSeg import PFSeg3D from medpy.metric.binary import jc,hd95 from", "action=\"store_true\") args = parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args)", "import PFSeg3D from medpy.metric.binary import jc,hd95 from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D # from loss.FALoss3D", "dice=ValModel() scheduler.step(dice) if dice>best_dice: best_dice=dice print('New best dice! 
Model saved to',model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') pt.save(model.state_dict(), model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt')", "guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for", "test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(), lr=lr) # # scheduler =", "dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if", "= pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr =", "trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(),", 
"hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Test Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. Test Avg Jaccard:", "scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for", "SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free 3D Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to", "+= dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard", "inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr", "weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data 
labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance", "if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n')", "as np from model.PFSeg import PFSeg3D from medpy.metric.binary import jc,hd95 from dataset.GuidedBraTSDataset3D import", "range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2]))", "batch size') parser.add_argument('-epoch', type=int, default=100, help='number of epochs') parser.add_argument('-lr', type=float, default=0.0001, help='learning rate')", "range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map", "\",jc_sum/len(test_dataset)) print(\"Finished. 
Test Avg hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100) for x", "labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2):", "np.sum(output_list[label_list == 1]) dice = 2 * pr_gt_sum / (pr_sum + gt_sum) dice_sum", "lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(), lr=lr) # # scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def", "from loss.TaskFusionLoss import TaskFusionLoss from loss.DiceLoss import BinaryDiceLoss from config import config import", "if dice>best_dice: best_dice=dice print('New best dice! Model saved to',model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') pt.save(model.state_dict(), model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') print('===TEST===>') TestModel()", "final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1])", "parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1:", "final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 
final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice) if dice>best_dice:", "in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask =", "= 2 * pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice if args.v:", "= argparse.ArgumentParser(description='Patch-free 3D Medical Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs',", "from config import config import argparse from tqdm import tqdm # from tensorboardX", "b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(val_dataset):", "parser.add_argument('-w_sr', type=float, default=0.5, help='w_sr of the lossfunc') parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf of the", "help='learning rate') parser.add_argument('-w_sr', type=float, default=0.5, help='w_sr of the lossfunc') parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf", "testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) 
test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(), lr=lr) #", "final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel()", "guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr", "model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size has to be larger than", "# scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2]))", "parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs', type=int, default=1, 
help='input batch size') parser.add_argument('-epoch',", "= parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if", "to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs', type=int, default=1, help='input batch size') parser.add_argument('-epoch', type=int,", "2 * pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff", "Image Segmentation.') parser.add_argument('-dataset_path',type=str,default='/newdata/why/BraTS20',help='path to dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs', type=int, default=1, help='input batch", "dataset') parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs', type=int, default=1, help='input batch size') parser.add_argument('-epoch', type=int, default=100,", "pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance)", "Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. 
Avg hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def TestModel(): model.eval() dice_sum=0 hd_sum=0", "range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1)", "mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2):", "range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs =", "# # scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0", "TestModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for", "final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice) if dice>best_dice: best_dice=dice print('New", "for i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) 
label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1)", "= pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance) loss_seg = lossfunc_seg(outputs_seg, labels_seg) loss_sr", "Set to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val')", "enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1)", "label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum() gt_sum =", "b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(test_dataset):", "help='w_sr of the lossfunc') 
parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a", "Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. Avg hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset)", "import cv2 from loss.TaskFusionLoss import TaskFusionLoss from loss.DiceLoss import BinaryDiceLoss from config import", "in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1", "argparse from tqdm import tqdm # from tensorboardX import SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size", "mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance) loss_seg", "in range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs", "output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum =", "loss_seg = lossfunc_seg(outputs_seg, labels_seg) loss_sr = lossfunc_sr(outputs_sr, labels_sr) loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) 
loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf", "default=0.0001, help='learning rate') parser.add_argument('-w_sr', type=float, default=0.5, help='w_sr of the lossfunc') parser.add_argument('-w_tf', type=float, default=0.5,", "in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask =", "+= dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Test Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. Test", "as pt import numpy as np from model.PFSeg import PFSeg3D from medpy.metric.binary import", "final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. Avg", "verbosity\", action=\"store_true\") args = parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf pretrained_model=args.load_pretrained", "\",dice_sum/len(test_dataset),'\\n') print(\"Finished. Test Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. 
Test Avg hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset)", "output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum =", "weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance", "in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D =", "loss.backward() optimizer.step() loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img)", "type=int, default=100, help='number of epochs') parser.add_argument('-lr', type=float, 
default=0.0001, help='learning rate') parser.add_argument('-w_sr', type=float, default=0.5,", "for i,data in enumerate(val_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1)", "model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss()", "dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100) for x in range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for", "outputs_seg,outputs_sr = model(inputs,guidance) loss_seg = lossfunc_seg(outputs_seg, labels_seg) loss_sr = lossfunc_sr(outputs_sr, labels_sr) loss_pf =", "final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice) if dice>best_dice: best_dice=dice print('New best", "hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def TestModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a", "w_tf=args.w_tf 
pretrained_model=args.load_pretrained print(args) model=PFSeg3D(in_channels=1,out_channels=1).cuda() if pt.cuda.device_count()>1: if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size has to", "# from loss.FALoss3D import FALoss3D import cv2 from loss.TaskFusionLoss import TaskFusionLoss from loss.DiceLoss", "train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(), lr=lr) # #", "(inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg =", "ncols=100) for x in range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in iterator:", "data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask =", "iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(2*size):(4*size)]=outputs_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 
final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice)", "# from tensorboardX import SummaryWriter crop_size=config.crop_size size=crop_size[2] img_size=config.input_img_size parser = argparse.ArgumentParser(description='Patch-free 3D Medical", "cv2 from loss.TaskFusionLoss import TaskFusionLoss from loss.DiceLoss import BinaryDiceLoss from config import config", "jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. Avg", "print(\"Finished. Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. Avg hausdorff: \",hd_sum/len(val_dataset)) return", "lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model') parser.add_argument('-v', help=\"increase output verbosity\", action=\"store_true\") args = parser.parse_args()", "val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(), lr=lr) # # scheduler", "output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) 
for", "lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1)", "pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance) loss_seg = lossfunc_seg(outputs_seg, labels_seg) loss_sr = lossfunc_sr(outputs_sr, labels_sr) loss_pf", "in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D", "Test Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. Test Avg hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset,", "pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum", "outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 
cv2.imwrite('TestPhase_BraTS.png',final_img)", "print(\"Finished. Test Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. Test Avg hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0", "in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2]))", "final_img[:,(4*size):(6*size)]=labels_seg.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(6*size):(8*size)]=labels_sr.cpu().data.numpy()[0,0,size//2,:,:]*255 final_img[:,(8*size):]=cv2.resize(inputs.cpu().data.numpy()[0,0,size//4,:,:],((2*size),(2*size)))*255 cv2.imwrite('combine.png',final_img) print('==>End of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice) if dice>best_dice: best_dice=dice", "pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum = output_list.sum()", "= pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a", "hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100) for x in range(epoch): model.train() loss_sum=0", "of epochs') parser.add_argument('-lr', type=float, default=0.0001, help='learning rate') parser.add_argument('-w_sr', 
type=float, default=0.5, help='w_sr of the", "of epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice) if dice>best_dice: best_dice=dice print('New best dice! Model saved", "best_dice=0 iterator=tqdm(train_dataset, ncols=100) for x in range(epoch): model.train() loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data", "gt_sum) dice_sum += dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice) hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0))", "dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. Test Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. Test Avg hausdorff: \",hd_sum/len(test_dataset)) return", "output') parser.add_argument('-bs', type=int, default=1, help='input batch size') parser.add_argument('-epoch', type=int, default=100, help='number of epochs')", "= pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for", "Avg hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100) for x in range(epoch): model.train()", "Test Avg hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100) for x in range(epoch):", "GuidedBraTSDataset3D # from loss.FALoss3D import FALoss3D import cv2 from loss.TaskFusionLoss import TaskFusionLoss from", "medpy.metric.binary import jc,hd95 from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D # from loss.FALoss3D import FALoss3D import", 
"\",jc_sum/len(val_dataset)) print(\"Finished. Avg hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def TestModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0", "size') parser.add_argument('-epoch', type=int, default=100, help='number of epochs') parser.add_argument('-lr', type=float, default=0.0001, help='learning rate') parser.add_argument('-w_sr',", "label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list", "pt.optim.Adam(model.parameters(), lr=lr) # # scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99) scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval() dice_sum=0", "pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) labels_seg = pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1)", "loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item()) final_img[:,0:(2*size)]=outputs_seg.cpu().data.numpy()[0,0,size//2,:,:]*255", "pt.autograd.Variable(labels_seg).type(pt.FloatTensor).cuda().unsqueeze(1) labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance) loss_seg = 
lossfunc_seg(outputs_seg, labels_seg) loss_sr =", "gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1]) dice = 2 * pr_gt_sum", "+ gt_sum) dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Test Total dice:", "pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished.", "scheduler=pt.optim.lr_scheduler.ReduceLROnPlateau(optimizer,mode='max',patience=20) def ValModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): #", "output_list[output_list>=0.5]=1 pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1]) dice", "dice = 2 * pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0))", "dice = 2 * pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice if", "range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): weight_map[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=1 weight_map=1./weight_map for i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2]))", "model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) 
output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum =", "output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum", "pretrained model') parser.add_argument('-v', help=\"increase output verbosity\", action=\"store_true\") args = parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch", "dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in", "instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True)", "* pr_gt_sum / (pr_sum + gt_sum) dice_sum += dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard", "dice>best_dice: best_dice=dice print('New best dice! 
Model saved to',model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') pt.save(model.state_dict(), model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt') print('===TEST===>') TestModel() print('\\nBest", "# overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1)", "if batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size has to be larger than GPU#. Set to", "= pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b", "lossfunc_sr(outputs_sr, labels_sr) loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5))", "jc_sum+=jaccard print(\"Finished. Total dice: \",dice_sum/len(val_dataset),'\\n') print(\"Finished. Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. 
Avg hausdorff: \",hd_sum/len(val_dataset))", "from medpy.metric.binary import jc,hd95 from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D # from loss.FALoss3D import FALoss3D", "import jc,hd95 from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D # from loss.FALoss3D import FALoss3D import cv2", "import argparse from tqdm import tqdm # from tensorboardX import SummaryWriter crop_size=config.crop_size size=crop_size[2]", "with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255", "dice hausdorff=hd95(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) jaccard=jc(output_list.squeeze(0).squeeze(0),label_list.squeeze(0).squeeze(0)) hd_sum+=hausdorff jc_sum+=jaccard print(\"Finished. Test Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. 
Test Avg", "output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1]) dice = 2 *", "the lossfunc') parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf of the lossfunc') parser.add_argument('-load_pretrained',type=str,default='',help='load a pretrained model')", "cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list == 1]) dice", "weight_map=1./weight_map for i,data in enumerate(test_dataset): output_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) label_list=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) (inputs,labels,_,guidance,mask)=data labels3D = pt.autograd.Variable(labels).type(pt.FloatTensor).cuda().unsqueeze(1) guidance =", "labels_sr = pt.autograd.Variable(labels_sr).type(pt.FloatTensor).cuda().unsqueeze(1) outputs_seg,outputs_sr = model(inputs,guidance) loss_seg = lossfunc_seg(outputs_seg, labels_seg) loss_sr = lossfunc_sr(outputs_sr,", "pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask = pt.autograd.Variable(mask).type(pt.FloatTensor).cuda().unsqueeze(1) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in", "loss_sr = lossfunc_sr(outputs_sr, labels_sr) loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item() if", "batch_size<pt.cuda.device_count(): batch_size=pt.cuda.device_count() print('Batch size has to be larger than GPU#. 
Set to {:d}", "parser.add_argument('-model_save_to',type=str,default='.',help='path to output') parser.add_argument('-bs', type=int, default=1, help='input batch size') parser.add_argument('-epoch', type=int, default=100, help='number", "lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss() lossfunc_pf=TaskFusionLoss() optimizer = pt.optim.Adam(model.parameters(), lr=lr) # # scheduler = pt.optim.lr_scheduler.ExponentialLR(optimizer, gamma=0.99)", "\",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def TestModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in", "import config import argparse from tqdm import tqdm # from tensorboardX import SummaryWriter", "range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2): for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance)", "\",dice_sum/len(val_dataset),'\\n') print(\"Finished. Avg Jaccard: \",jc_sum/len(val_dataset)) print(\"Finished. Avg hausdorff: \",hd_sum/len(val_dataset)) return dice_sum/len(val_dataset) def TestModel():", "to be larger than GPU#. 
Set to {:d} instead.'.format(batch_size)) model=pt.nn.DataParallel(model) if not pretrained_model=='':", "jc,hd95 from dataset.GuidedBraTSDataset3D import GuidedBraTSDataset3D # from loss.FALoss3D import FALoss3D import cv2 from", "loss_sum=0 print('\\n==>Epoch',x,': lr=',optimizer.param_groups[0]['lr'],'==>\\n') for data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance", "(pr_sum + gt_sum) dice_sum += dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img) print(\"dice:\",dice)", "pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map output_list[output_list<0.5]=0 output_list[output_list>=0.5]=1 final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255", "labels_sr) loss_pf = lossfunc_pf(outputs_seg,outputs_sr,labels_seg*labels_sr) loss_guide=lossfunc_sr(mask*outputs_sr,mask*labels_sr) loss=lossfunc_dice(outputs_seg,labels_seg)+loss_seg+w_sr*(loss_sr+loss_guide)+w_tf*loss_pf loss.backward() optimizer.step() loss_sum+=loss.item() if args.v: final_img=np.zeros(shape=(2*size,2*size*5)) iterator.set_postfix(loss=loss.item(),loss_seg=loss_seg.item(),loss_sr=loss_sr.item())", "for c in range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ = 
model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy())", "type=float, default=0.5, help='w_sr of the lossfunc') parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf of the lossfunc')", "range(0,img_size[2]-crop_size[2]+1,crop_size[2]//2): inputs3D = pt.autograd.Variable(inputs[:,a:(a+crop_size[0]),b:(b+crop_size[1]),c:(c+crop_size[2])]).type(pt.FloatTensor).cuda().unsqueeze(1) with pt.no_grad(): outputs3D,_ = model(inputs3D,guidance) outputs3D=np.array(outputs3D.cpu().data.numpy()) output_list[:,:,(2*a):(2*(a+crop_size[0])),(2*b):(2*(b+crop_size[1])),(2*c):(2*(c+crop_size[2]))]+=outputs3D label_list=np.array(labels3D.cpu().data.numpy()) output_list=np.array(output_list)*weight_map", "from loss.DiceLoss import BinaryDiceLoss from config import config import argparse from tqdm import", "epoch',x,'==>\\n') print('===VAL===>') dice=ValModel() scheduler.step(dice) if dice>best_dice: best_dice=dice print('New best dice! Model saved to',model_path+'/PFSeg_3D_BraTS_patch-free_bs'+str(batch_size)+'_best.pt')", "/ (pr_sum + gt_sum) dice_sum += dice if args.v: final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('ValPhase_BraTS.png',final_img)", "hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5 for b in range(0,img_size[1]-crop_size[1]+1,crop_size[1]//2):", "rate') parser.add_argument('-w_sr', type=float, default=0.5, help='w_sr of the lossfunc') parser.add_argument('-w_tf', type=float, default=0.5, help='w_tf of", "Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. 
Test Avg hausdorff: \",hd_sum/len(test_dataset)) return dice_sum/len(test_dataset) best_dice=0 iterator=tqdm(train_dataset, ncols=100) for", "final_img=np.zeros(shape=(2*img_size[1],2*2*img_size[2])) final_img[:,:2*img_size[2]]=output_list[0,0,64,:,:]*255 final_img[:,2*img_size[2]:]=label_list[0,0,64,:,:]*255 cv2.imwrite('TestPhase_BraTS.png',final_img) pr_sum = output_list.sum() gt_sum = label_list.sum() pr_gt_sum = np.sum(output_list[label_list", "for data in iterator: (inputs,labels_seg,labels_sr,guidance,mask)=data optimizer.zero_grad() inputs = pt.autograd.Variable(inputs).type(pt.FloatTensor).cuda().unsqueeze(1) guidance = pt.autograd.Variable(guidance).type(pt.FloatTensor).cuda().unsqueeze(1) mask", "torch as pt import numpy as np from model.PFSeg import PFSeg3D from medpy.metric.binary", "loss.DiceLoss import BinaryDiceLoss from config import config import argparse from tqdm import tqdm", "def ValModel(): model.eval() dice_sum=0 hd_sum=0 jc_sum=0 weight_map=np.zeros((1,1,2*img_size[0],2*img_size[1],2*img_size[2])) for a in range(0,img_size[0]-crop_size[0]+1,crop_size[0]//2): # overlap0.5", "jc_sum+=jaccard print(\"Finished. Test Total dice: \",dice_sum/len(test_dataset),'\\n') print(\"Finished. Test Avg Jaccard: \",jc_sum/len(test_dataset)) print(\"Finished. 
Test", "output verbosity\", action=\"store_true\") args = parser.parse_args() dataset_path=args.dataset_path lr=args.lr epoch=args.epoch batch_size=args.bs model_path=args.model_save_to w_sr=args.w_sr w_tf=args.w_tf", "pretrained_model=='': model.load_state_dict(pt.load(pretrained_model,map_location = 'cpu')) trainset=GuidedBraTSDataset3D(dataset_path,mode='train') valset=GuidedBraTSDataset3D(dataset_path,mode='val') testset=GuidedBraTSDataset3D(dataset_path,mode='test') train_dataset=pt.utils.data.DataLoader(trainset,batch_size=batch_size,shuffle=True,drop_last=True) val_dataset=pt.utils.data.DataLoader(valset,batch_size=1,shuffle=True,drop_last=True) test_dataset=pt.utils.data.DataLoader(testset,batch_size=1,shuffle=True,drop_last=True) lossfunc_sr=pt.nn.MSELoss() lossfunc_seg=pt.nn.BCELoss() lossfunc_dice=BinaryDiceLoss()" ]
[ "module contains the class definition for the problem's operating point. problems.py: This module", "that relate to geometry, and the class definitions for different types of geometries.", "stored in DAT files. This package contains the following modules: __init__.py: This module", "folder contains a collection of airfoils whose coordinates are stored in DAT files.", "current_operating_point.py: This module contains the class definition for the problem's operating point. problems.py:", "following directories: airfoils: This folder contains a collection of airfoils whose coordinates are", "class definition of this package's unsteady ring vortex lattice solver. \"\"\" import pterasoftware.aerodynamics", "to geometry, and the class definitions for different types of geometries. meshing.py: This", "lattice solver. steady_ring_vortex_lattice_method.py: This module contains the class definition of this package's steady", "This package contains the following modules: __init__.py: This module is this package's initialization", "vortex class definitions. functions.py: This module contains functions used by other modules in", "to problems. movement.py: This module contains the class definitions for the problem's movement.", "module contains the class definition of this package's steady ring vortex lattice solver.", "\"\"\" import pterasoftware.aerodynamics import pterasoftware.airfoils import pterasoftware.geometry import pterasoftware.meshing import pterasoftware.movement import pterasoftware.operating_point", "geometry, and the class definitions for different types of geometries. meshing.py: This module", "pterasoftware.airfoils import pterasoftware.geometry import pterasoftware.meshing import pterasoftware.movement import pterasoftware.operating_point import pterasoftware.output import pterasoftware.problems", "Ptera Software. This package contains the following subpackages: None This package contains the", "Software. 
This package contains the following subpackages: None This package contains the following", "module contains functions used by other modules in the pterasoftware package. geometry.py: This", "types of problems. steady_horseshoe_vortex_lattice_method.py: This module contains the class definition of this package's", "contains the class definition for the problem's operating point. problems.py: This module contains", "geometries. meshing.py: This module contains useful functions for creating meshes. output.py: This module", "of problems. steady_horseshoe_vortex_lattice_method.py: This module contains the class definition of this package's steady", "solver. steady_ring_vortex_lattice_method.py: This module contains the class definition of this package's steady ring", "for the Ptera Software. This package contains the following subpackages: None This package", "this module's documentation. \"\"\"This package contains all the source code for the Ptera", "this package's unsteady ring vortex lattice solver. \"\"\" import pterasoftware.aerodynamics import pterasoftware.airfoils import", "files. This package contains the following modules: __init__.py: This module is this package's", "the problem's movement. current_operating_point.py: This module contains the class definition for the problem's", "meshes. output.py: This module contains useful functions for visualizing solutions to problems. movement.py:", "package's initialization script. aerodynamics.py: This module contains vortex class definitions. functions.py: This module", "definitions for the problem's movement. current_operating_point.py: This module contains the class definition for", "functions that relate to geometry, and the class definitions for different types of", "module contains the class definitions for different types of problems. steady_horseshoe_vortex_lattice_method.py: This module", "this package's steady ring vortex lattice solver. 
unsteady_ring_vortex_lattice_method.py: This module contains the class", "steady_ring_vortex_lattice_method.py: This module contains the class definition of this package's steady ring vortex", "output.py: This module contains useful functions for visualizing solutions to problems. movement.py: This", "This folder contains a collection of airfoils whose coordinates are stored in DAT", "steady horseshoe vortex lattice solver. steady_ring_vortex_lattice_method.py: This module contains the class definition of", "useful functions that relate to geometry, and the class definitions for different types", "class definition of this package's steady horseshoe vortex lattice solver. steady_ring_vortex_lattice_method.py: This module", "import pterasoftware.movement import pterasoftware.operating_point import pterasoftware.output import pterasoftware.problems import pterasoftware.steady_horseshoe_vortex_lattice_method import pterasoftware.steady_ring_vortex_lattice_method import", "geometry.py: This module contains useful functions that relate to geometry, and the class", "package's unsteady ring vortex lattice solver. \"\"\" import pterasoftware.aerodynamics import pterasoftware.airfoils import pterasoftware.geometry", "a collection of airfoils whose coordinates are stored in DAT files. This package", "meshing.py: This module contains useful functions for creating meshes. output.py: This module contains", "different types of problems. steady_horseshoe_vortex_lattice_method.py: This module contains the class definition of this", "contains the class definition of this package's unsteady ring vortex lattice solver. \"\"\"", "This module contains the class definitions for the problem's movement. current_operating_point.py: This module", "contains the following directories: airfoils: This folder contains a collection of airfoils whose", "module is this package's initialization script. 
aerodynamics.py: This module contains vortex class definitions.", "class definition of this package's steady ring vortex lattice solver. unsteady_ring_vortex_lattice_method.py: This module", "This module contains functions used by other modules in the pterasoftware package. geometry.py:", "the Ptera Software. This package contains the following subpackages: None This package contains", "solutions to problems. movement.py: This module contains the class definitions for the problem's", "initialization script. aerodynamics.py: This module contains vortex class definitions. functions.py: This module contains", "functions used by other modules in the pterasoftware package. geometry.py: This module contains", "This module contains useful functions for visualizing solutions to problems. movement.py: This module", "lattice solver. unsteady_ring_vortex_lattice_method.py: This module contains the class definition of this package's unsteady", "definition of this package's unsteady ring vortex lattice solver. \"\"\" import pterasoftware.aerodynamics import", "the following directories: airfoils: This folder contains a collection of airfoils whose coordinates", "the pterasoftware package. geometry.py: This module contains useful functions that relate to geometry,", "This module contains useful functions that relate to geometry, and the class definitions", "class definitions for different types of geometries. meshing.py: This module contains useful functions", "coordinates are stored in DAT files. This package contains the following modules: __init__.py:", "definition of this package's steady ring vortex lattice solver. unsteady_ring_vortex_lattice_method.py: This module contains", "movement.py: This module contains the class definitions for the problem's movement. current_operating_point.py: This", "class definitions for the problem's movement. current_operating_point.py: This module contains the class definition", "the class definition for the problem's operating point. 
problems.py: This module contains the", "the following subpackages: None This package contains the following directories: airfoils: This folder", "for the problem's movement. current_operating_point.py: This module contains the class definition for the", "solver. \"\"\" import pterasoftware.aerodynamics import pterasoftware.airfoils import pterasoftware.geometry import pterasoftware.meshing import pterasoftware.movement import", "module contains useful functions for visualizing solutions to problems. movement.py: This module contains", "vortex lattice solver. steady_ring_vortex_lattice_method.py: This module contains the class definition of this package's", "unsteady ring vortex lattice solver. \"\"\" import pterasoftware.aerodynamics import pterasoftware.airfoils import pterasoftware.geometry import", "This module contains the class definition of this package's unsteady ring vortex lattice", "following subpackages: None This package contains the following directories: airfoils: This folder contains", "package. geometry.py: This module contains useful functions that relate to geometry, and the", "the class definitions for the problem's movement. current_operating_point.py: This module contains the class", "source code for the Ptera Software. This package contains the following subpackages: None", "relate to geometry, and the class definitions for different types of geometries. meshing.py:", "for different types of problems. steady_horseshoe_vortex_lattice_method.py: This module contains the class definition of", "class definitions for different types of problems. steady_horseshoe_vortex_lattice_method.py: This module contains the class", "documentation. \"\"\"This package contains all the source code for the Ptera Software. This", "operating point. problems.py: This module contains the class definitions for different types of", "class definitions. 
functions.py: This module contains functions used by other modules in the", "This module is this package's initialization script. aerodynamics.py: This module contains vortex class", "class definition for the problem's operating point. problems.py: This module contains the class", "for visualizing solutions to problems. movement.py: This module contains the class definitions for", "the problem's operating point. problems.py: This module contains the class definitions for different", "the class definition of this package's steady ring vortex lattice solver. unsteady_ring_vortex_lattice_method.py: This", "import pterasoftware.geometry import pterasoftware.meshing import pterasoftware.movement import pterasoftware.operating_point import pterasoftware.output import pterasoftware.problems import", "of airfoils whose coordinates are stored in DAT files. This package contains the", "code for the Ptera Software. This package contains the following subpackages: None This", "unsteady_ring_vortex_lattice_method.py: This module contains the class definition of this package's unsteady ring vortex", "\"\"\"This package contains all the source code for the Ptera Software. This package", "contains useful functions for creating meshes. output.py: This module contains useful functions for", "the source code for the Ptera Software. This package contains the following subpackages:", "__init__.py: This module is this package's initialization script. aerodynamics.py: This module contains vortex", "lattice solver. \"\"\" import pterasoftware.aerodynamics import pterasoftware.airfoils import pterasoftware.geometry import pterasoftware.meshing import pterasoftware.movement", "functions.py: This module contains functions used by other modules in the pterasoftware package.", "contains the class definitions for the problem's movement. current_operating_point.py: This module contains the", "# ToDo: Update this module's documentation. 
\"\"\"This package contains all the source code", "problem's operating point. problems.py: This module contains the class definitions for different types", "is this package's initialization script. aerodynamics.py: This module contains vortex class definitions. functions.py:", "in the pterasoftware package. geometry.py: This module contains useful functions that relate to", "contains the class definitions for different types of problems. steady_horseshoe_vortex_lattice_method.py: This module contains", "useful functions for visualizing solutions to problems. movement.py: This module contains the class", "module contains the class definitions for the problem's movement. current_operating_point.py: This module contains", "import pterasoftware.aerodynamics import pterasoftware.airfoils import pterasoftware.geometry import pterasoftware.meshing import pterasoftware.movement import pterasoftware.operating_point import", "contains vortex class definitions. functions.py: This module contains functions used by other modules", "problem's movement. current_operating_point.py: This module contains the class definition for the problem's operating", "pterasoftware.geometry import pterasoftware.meshing import pterasoftware.movement import pterasoftware.operating_point import pterasoftware.output import pterasoftware.problems import pterasoftware.steady_horseshoe_vortex_lattice_method", "different types of geometries. meshing.py: This module contains useful functions for creating meshes.", "are stored in DAT files. This package contains the following modules: __init__.py: This", "definitions for different types of problems. steady_horseshoe_vortex_lattice_method.py: This module contains the class definition", "solver. 
unsteady_ring_vortex_lattice_method.py: This module contains the class definition of this package's unsteady ring", "package contains the following subpackages: None This package contains the following directories: airfoils:", "airfoils whose coordinates are stored in DAT files. This package contains the following", "problems. steady_horseshoe_vortex_lattice_method.py: This module contains the class definition of this package's steady horseshoe", "definition of this package's steady horseshoe vortex lattice solver. steady_ring_vortex_lattice_method.py: This module contains", "contains useful functions that relate to geometry, and the class definitions for different", "aerodynamics.py: This module contains vortex class definitions. functions.py: This module contains functions used", "modules: __init__.py: This module is this package's initialization script. aerodynamics.py: This module contains", "steady_horseshoe_vortex_lattice_method.py: This module contains the class definition of this package's steady horseshoe vortex", "Update this module's documentation. \"\"\"This package contains all the source code for the", "this package's steady horseshoe vortex lattice solver. steady_ring_vortex_lattice_method.py: This module contains the class", "of this package's steady ring vortex lattice solver. unsteady_ring_vortex_lattice_method.py: This module contains the", "This module contains the class definition of this package's steady ring vortex lattice", "script. aerodynamics.py: This module contains vortex class definitions. functions.py: This module contains functions", "functions for creating meshes. output.py: This module contains useful functions for visualizing solutions", "DAT files. This package contains the following modules: __init__.py: This module is this", "This package contains the following directories: airfoils: This folder contains a collection of", "contains a collection of airfoils whose coordinates are stored in DAT files. 
This", "for the problem's operating point. problems.py: This module contains the class definitions for", "This module contains the class definition of this package's steady horseshoe vortex lattice", "in DAT files. This package contains the following modules: __init__.py: This module is", "following modules: __init__.py: This module is this package's initialization script. aerodynamics.py: This module", "None This package contains the following directories: airfoils: This folder contains a collection", "pterasoftware package. geometry.py: This module contains useful functions that relate to geometry, and", "module contains the class definition of this package's steady horseshoe vortex lattice solver.", "contains functions used by other modules in the pterasoftware package. geometry.py: This module", "vortex lattice solver. \"\"\" import pterasoftware.aerodynamics import pterasoftware.airfoils import pterasoftware.geometry import pterasoftware.meshing import", "subpackages: None This package contains the following directories: airfoils: This folder contains a", "contains all the source code for the Ptera Software. This package contains the", "airfoils: This folder contains a collection of airfoils whose coordinates are stored in", "whose coordinates are stored in DAT files. This package contains the following modules:", "import pterasoftware.airfoils import pterasoftware.geometry import pterasoftware.meshing import pterasoftware.movement import pterasoftware.operating_point import pterasoftware.output import", "This module contains vortex class definitions. functions.py: This module contains functions used by", "definitions. functions.py: This module contains functions used by other modules in the pterasoftware", "ToDo: Update this module's documentation. \"\"\"This package contains all the source code for", "visualizing solutions to problems. 
movement.py: This module contains the class definitions for the", "pterasoftware.movement import pterasoftware.operating_point import pterasoftware.output import pterasoftware.problems import pterasoftware.steady_horseshoe_vortex_lattice_method import pterasoftware.steady_ring_vortex_lattice_method import pterasoftware.unsteady_ring_vortex_lattice_method", "modules in the pterasoftware package. geometry.py: This module contains useful functions that relate", "contains the class definition of this package's steady horseshoe vortex lattice solver. steady_ring_vortex_lattice_method.py:", "pterasoftware.meshing import pterasoftware.movement import pterasoftware.operating_point import pterasoftware.output import pterasoftware.problems import pterasoftware.steady_horseshoe_vortex_lattice_method import pterasoftware.steady_ring_vortex_lattice_method", "the class definitions for different types of geometries. meshing.py: This module contains useful", "definition for the problem's operating point. problems.py: This module contains the class definitions", "import pterasoftware.meshing import pterasoftware.movement import pterasoftware.operating_point import pterasoftware.output import pterasoftware.problems import pterasoftware.steady_horseshoe_vortex_lattice_method import", "used by other modules in the pterasoftware package. geometry.py: This module contains useful", "collection of airfoils whose coordinates are stored in DAT files. This package contains", "module contains the class definition of this package's unsteady ring vortex lattice solver.", "useful functions for creating meshes. output.py: This module contains useful functions for visualizing", "pterasoftware.aerodynamics import pterasoftware.airfoils import pterasoftware.geometry import pterasoftware.meshing import pterasoftware.movement import pterasoftware.operating_point import pterasoftware.output", "problems. movement.py: This module contains the class definitions for the problem's movement. 
current_operating_point.py:", "for creating meshes. output.py: This module contains useful functions for visualizing solutions to", "types of geometries. meshing.py: This module contains useful functions for creating meshes. output.py:", "point. problems.py: This module contains the class definitions for different types of problems.", "and the class definitions for different types of geometries. meshing.py: This module contains", "by other modules in the pterasoftware package. geometry.py: This module contains useful functions", "contains useful functions for visualizing solutions to problems. movement.py: This module contains the", "the class definition of this package's unsteady ring vortex lattice solver. \"\"\" import", "module contains vortex class definitions. functions.py: This module contains functions used by other", "package's steady ring vortex lattice solver. unsteady_ring_vortex_lattice_method.py: This module contains the class definition", "package contains the following directories: airfoils: This folder contains a collection of airfoils", "horseshoe vortex lattice solver. steady_ring_vortex_lattice_method.py: This module contains the class definition of this", "ring vortex lattice solver. \"\"\" import pterasoftware.aerodynamics import pterasoftware.airfoils import pterasoftware.geometry import pterasoftware.meshing", "movement. current_operating_point.py: This module contains the class definition for the problem's operating point.", "This package contains the following subpackages: None This package contains the following directories:", "of this package's steady horseshoe vortex lattice solver. steady_ring_vortex_lattice_method.py: This module contains the", "functions for visualizing solutions to problems. movement.py: This module contains the class definitions", "all the source code for the Ptera Software. This package contains the following", "module contains useful functions for creating meshes. 
output.py: This module contains useful functions", "package's steady horseshoe vortex lattice solver. steady_ring_vortex_lattice_method.py: This module contains the class definition", "ring vortex lattice solver. unsteady_ring_vortex_lattice_method.py: This module contains the class definition of this", "<gh_stars>10-100 # ToDo: Update this module's documentation. \"\"\"This package contains all the source", "package contains the following modules: __init__.py: This module is this package's initialization script.", "definitions for different types of geometries. meshing.py: This module contains useful functions for", "problems.py: This module contains the class definitions for different types of problems. steady_horseshoe_vortex_lattice_method.py:", "contains the following subpackages: None This package contains the following directories: airfoils: This", "contains the class definition of this package's steady ring vortex lattice solver. unsteady_ring_vortex_lattice_method.py:", "directories: airfoils: This folder contains a collection of airfoils whose coordinates are stored", "this package's initialization script. aerodynamics.py: This module contains vortex class definitions. functions.py: This", "other modules in the pterasoftware package. geometry.py: This module contains useful functions that", "the following modules: __init__.py: This module is this package's initialization script. aerodynamics.py: This", "contains the following modules: __init__.py: This module is this package's initialization script. aerodynamics.py:", "package contains all the source code for the Ptera Software. This package contains", "for different types of geometries. meshing.py: This module contains useful functions for creating", "vortex lattice solver. unsteady_ring_vortex_lattice_method.py: This module contains the class definition of this package's", "of geometries. meshing.py: This module contains useful functions for creating meshes. 
output.py: This", "of this package's unsteady ring vortex lattice solver. \"\"\" import pterasoftware.aerodynamics import pterasoftware.airfoils", "This module contains the class definition for the problem's operating point. problems.py: This", "creating meshes. output.py: This module contains useful functions for visualizing solutions to problems.", "the class definition of this package's steady horseshoe vortex lattice solver. steady_ring_vortex_lattice_method.py: This", "module's documentation. \"\"\"This package contains all the source code for the Ptera Software.", "the class definitions for different types of problems. steady_horseshoe_vortex_lattice_method.py: This module contains the", "module contains useful functions that relate to geometry, and the class definitions for", "This module contains useful functions for creating meshes. output.py: This module contains useful", "This module contains the class definitions for different types of problems. steady_horseshoe_vortex_lattice_method.py: This", "steady ring vortex lattice solver. unsteady_ring_vortex_lattice_method.py: This module contains the class definition of" ]
[ "updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, ) class TaskFilter(FilterSet): status = ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta: model", "['status'] def completed_custom_filter(self, queryset, name, value): if value == \"COMPLETED\": return queryset.filter(status=\"COMPLETED\") elif", "class TaskFilter(FilterSet): status = ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta: model = Task fields =", "value): if value == \"COMPLETED\": return queryset.filter(status=\"COMPLETED\") elif value == \"NOT_COMPLETED\": return queryset.filter(~Q(status=\"COMPLETED\"))", "ChoiceFilter, DateFilter from tasks.models import STATUS_CHOICES, Task, History from django_filters.rest_framework import FilterSet from", "( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter') class Meta: model =", "= ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta: model = Task fields = ['status'] def completed_custom_filter(self,", "updated_date__day=value.day, ) class TaskFilter(FilterSet): status = ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta: model = Task", "from tasks.models import STATUS_CHOICES, Task, History from django_filters.rest_framework import FilterSet from django.db.models import", "name, value): return queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, ) class TaskFilter(FilterSet): status = ChoiceFilter(method='completed_custom_filter',", "= History fields = ['status_current', 'status_previous', 'created_date'] def custom_date_filter(self, queryset, name, value): return", "model = History fields = ['status_current', 'status_previous', 'created_date'] def custom_date_filter(self, queryset, name, value):", "= 
DateFilter(method='custom_date_filter') class Meta: model = History fields = ['status_current', 'status_previous', 'created_date'] def", "STATUS_CHOICES, Task, History from django_filters.rest_framework import FilterSet from django.db.models import Q STATUS_CHOICES_CUSTOM =", "Q STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter') class", "custom_date_filter(self, queryset, name, value): return queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, ) class TaskFilter(FilterSet): status", "TaskFilter(FilterSet): status = ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta: model = Task fields = ['status']", "FilterSet from django.db.models import Q STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet):", "class HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter') class Meta: model = History fields = ['status_current',", "from django.db.models import Q STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet): created_date", "STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter') class Meta:", "django.db.models import Q STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet): created_date =", "status = ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta: model = Task fields = ['status'] def", "Meta: model = Task fields = ['status'] def completed_custom_filter(self, queryset, name, value): if", "queryset, name, value): if value == \"COMPLETED\": 
return queryset.filter(status=\"COMPLETED\") elif value == \"NOT_COMPLETED\":", "Meta: model = History fields = ['status_current', 'status_previous', 'created_date'] def custom_date_filter(self, queryset, name,", "import Q STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter')", "django_filters.rest_framework import FilterSet from django.db.models import Q STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") )", "import STATUS_CHOICES, Task, History from django_filters.rest_framework import FilterSet from django.db.models import Q STATUS_CHOICES_CUSTOM", ") class HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter') class Meta: model = History fields =", "completed_custom_filter(self, queryset, name, value): if value == \"COMPLETED\": return queryset.filter(status=\"COMPLETED\") elif value ==", "DateFilter(method='custom_date_filter') class Meta: model = History fields = ['status_current', 'status_previous', 'created_date'] def custom_date_filter(self,", "def completed_custom_filter(self, queryset, name, value): if value == \"COMPLETED\": return queryset.filter(status=\"COMPLETED\") elif value", "value): return queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, ) class TaskFilter(FilterSet): status = ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM)", "History fields = ['status_current', 'status_previous', 'created_date'] def custom_date_filter(self, queryset, name, value): return queryset.filter(", "= ['status'] def completed_custom_filter(self, queryset, name, value): if value == \"COMPLETED\": return queryset.filter(status=\"COMPLETED\")", "updated_date__month=value.month, updated_date__day=value.day, ) class TaskFilter(FilterSet): status = 
ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta: model =", "ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta: model = Task fields = ['status'] def completed_custom_filter(self, queryset,", "from django_filters.rest_framework import FilterSet from django.db.models import Q STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\")", "= ['status_current', 'status_previous', 'created_date'] def custom_date_filter(self, queryset, name, value): return queryset.filter( updated_date__year=value.year, updated_date__month=value.month,", "HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter') class Meta: model = History fields = ['status_current', 'status_previous',", "fields = ['status_current', 'status_previous', 'created_date'] def custom_date_filter(self, queryset, name, value): return queryset.filter( updated_date__year=value.year,", "DateFilter from tasks.models import STATUS_CHOICES, Task, History from django_filters.rest_framework import FilterSet from django.db.models", "queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, ) class TaskFilter(FilterSet): status = ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta:", ") class TaskFilter(FilterSet): status = ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class Meta: model = Task fields", "class Meta: model = History fields = ['status_current', 'status_previous', 'created_date'] def custom_date_filter(self, queryset,", "'status_previous', 'created_date'] def custom_date_filter(self, queryset, name, value): return queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, )", "class Meta: model = Task fields = ['status'] def completed_custom_filter(self, queryset, name, value):", "created_date = 
DateFilter(method='custom_date_filter') class Meta: model = History fields = ['status_current', 'status_previous', 'created_date']", "queryset, name, value): return queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, ) class TaskFilter(FilterSet): status =", "= ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter') class Meta: model", "def custom_date_filter(self, queryset, name, value): return queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, ) class TaskFilter(FilterSet):", "tasks.models import STATUS_CHOICES, Task, History from django_filters.rest_framework import FilterSet from django.db.models import Q", "(\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter') class Meta: model = History", "model = Task fields = ['status'] def completed_custom_filter(self, queryset, name, value): if value", "name, value): if value == \"COMPLETED\": return queryset.filter(status=\"COMPLETED\") elif value == \"NOT_COMPLETED\": return", "Task fields = ['status'] def completed_custom_filter(self, queryset, name, value): if value == \"COMPLETED\":", "django_filters.filters import ChoiceFilter, DateFilter from tasks.models import STATUS_CHOICES, Task, History from django_filters.rest_framework import", "import ChoiceFilter, DateFilter from tasks.models import STATUS_CHOICES, Task, History from django_filters.rest_framework import FilterSet", "['status_current', 'status_previous', 'created_date'] def custom_date_filter(self, queryset, name, value): return queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day,", "History from django_filters.rest_framework import FilterSet from django.db.models import Q 
STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"),", "choices=STATUS_CHOICES_CUSTOM) class Meta: model = Task fields = ['status'] def completed_custom_filter(self, queryset, name,", "= Task fields = ['status'] def completed_custom_filter(self, queryset, name, value): if value ==", "(\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class HistoryFilter(FilterSet): created_date = DateFilter(method='custom_date_filter') class Meta: model = History fields", "return queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, ) class TaskFilter(FilterSet): status = ChoiceFilter(method='completed_custom_filter', choices=STATUS_CHOICES_CUSTOM) class", "fields = ['status'] def completed_custom_filter(self, queryset, name, value): if value == \"COMPLETED\": return", "from django_filters.filters import ChoiceFilter, DateFilter from tasks.models import STATUS_CHOICES, Task, History from django_filters.rest_framework", "import FilterSet from django.db.models import Q STATUS_CHOICES_CUSTOM = ( (\"COMPLETED\",\"COMPLETED\"), (\"NOT_COMPLETED\",\"NOT_COMPLETED\") ) class", "<reponame>kunatastic/kunatastic-task-manager from django_filters.filters import ChoiceFilter, DateFilter from tasks.models import STATUS_CHOICES, Task, History from", "'created_date'] def custom_date_filter(self, queryset, name, value): return queryset.filter( updated_date__year=value.year, updated_date__month=value.month, updated_date__day=value.day, ) class", "Task, History from django_filters.rest_framework import FilterSet from django.db.models import Q STATUS_CHOICES_CUSTOM = (" ]
[ "becomes: ember generate route foobar --pod ''' command = ['ember', cmd_name] + list(args)", "path to the project's Ember app.''' return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text):", "no setting available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls): '''Return the full, absolute", "Model._meta.app_label + '.*' if key in model_name_set or app_star in model_name_set: model_set.add(Model) return", "for Model in app_config.get_models(): key = Model._meta.app_label + '.' + Model.__name__ app_star =", "@classmethod def get_setting(cls, key): '''Get a setting from the user's project by key,", "'EMBER_APP_NAME': None, 'API_PATH': None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None } class EmberCommand(BaseCommand): @classmethod def", "full, absolute path to the project's Ember app.''' return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def", "False, just omit the kwarg if value: command.append('--' + key) if value is", "class EmberCommand(BaseCommand): @classmethod def get_setting(cls, key): '''Get a setting from the user's project", "(booleans are assumed to be \"boolean named arguments\") e.g.: run_ember_command('generate', 'route', 'foobar', pod=True)", "None: missing_settings.append(key) if missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT is missing the following keys: '", "containing the actual Model class objects that are specified by MODELS_TO_SYNC.''' for app_config", "we pass None or False, just omit the kwarg if value: command.append('--' +", "and kwargs will be converted into positional and named arguments respectively (booleans are", "DEFAULT_SETTINGS = { 'EMBER_APP_NAME': None, 'API_PATH': None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None } class", "'''Run the named ember in the project's FULL_EMBER_PATH. 
Any args and kwargs will", "'.join(missing_settings)) def run_ember_command(self, cmd_name, *args, **kwargs): '''Run the named ember in the project's", "} class EmberCommand(BaseCommand): @classmethod def get_setting(cls, key): '''Get a setting from the user's", "from termcolor import colored SEPARATOR = '---------------------------------------------------------------' # settings with a default of", "'settings.EMBER_TOOLKIT is missing the following keys: ' + ', '.join(missing_settings)) def run_ember_command(self, cmd_name,", "= join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path, 'w') as config_file: config_file.write(config_source) def get_sync_model_set(cls): '''Return a", "error if any of args are not configured in settings.EMBER_TOOLKIT''' if not hasattr(settings,", "to the project's Ember app.''' return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text): self.stdout.write(SEPARATOR)", "self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls, *args): '''Raise a useful error if any of args", "omit the kwarg if value: command.append('--' + key) if value is not True:", "a set containing the actual Model class objects that are specified by MODELS_TO_SYNC.'''", "e.g.: run_ember_command('generate', 'route', 'foobar', pod=True) becomes: ember generate route foobar --pod ''' command", "command = ['ember', cmd_name] + list(args) for key, value in kwargs: # in", "value in kwargs: # in the unlikely case we pass None or False,", "unlikely case we pass None or False, just omit the kwarg if value:", "settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls): '''Return the full, absolute path to the project's", "be converted into positional and named arguments respectively (booleans are assumed to be", "get_full_ember_path(cls): '''Return the full, absolute path to the project's Ember app.''' return abspath(join(", "os.path import abspath, join 
import subprocess from django.apps import apps as django_apps from", "actual Model class objects that are specified by MODELS_TO_SYNC.''' for app_config in django_apps.get_app_configs():", "app.''' return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def", "render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path, 'w') as config_file:", "pass None or False, just omit the kwarg if value: command.append('--' + key)", "django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set = set() for Model in app_config.get_models(): key =", "import render_to_string from termcolor import colored SEPARATOR = '---------------------------------------------------------------' # settings with a", "import abspath, join import subprocess from django.apps import apps as django_apps from django.conf", "'client', 'MODELS_TO_SYNC': None } class EmberCommand(BaseCommand): @classmethod def get_setting(cls, key): '''Get a setting", "is missing the following keys: ' + ', '.join(missing_settings)) def run_ember_command(self, cmd_name, *args,", "in settings.EMBER_TOOLKIT''' if not hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You must define an EMBER_TOOLKIT dict", "default if there's no setting available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls): '''Return", "value: command.append('--' + key) if value is not True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command),", "join import subprocess from django.apps import apps as django_apps from django.conf import settings", "import settings from django.core.management.base import BaseCommand, 
CommandError from django.template.loader import render_to_string from termcolor", "must define an EMBER_TOOLKIT dict in settings') missing_settings = [] for key in", "cls.get_setting(key) is None: missing_settings.append(key) if missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT is missing the following", "assumed to be \"boolean named arguments\") e.g.: run_ember_command('generate', 'route', 'foobar', pod=True) becomes: ember", "at the given path.''' config_source = render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(),", "key, value in kwargs: # in the unlikely case we pass None or", "'w') as config_file: config_file.write(config_source) def get_sync_model_set(cls): '''Return a set containing the actual Model", "@classmethod def assert_required_settings(cls, *args): '''Raise a useful error if any of args are", "[] for key in args: if cls.get_setting(key) is None: missing_settings.append(key) if missing_settings: raise", "settings') missing_settings = [] for key in args: if cls.get_setting(key) is None: missing_settings.append(key)", "self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls, *args): '''Raise a useful error if any of", "Model.__name__ app_star = Model._meta.app_label + '.*' if key in model_name_set or app_star in", "+ Model.__name__ app_star = Model._meta.app_label + '.*' if key in model_name_set or app_star", "django.core.management.base import BaseCommand, CommandError from django.template.loader import render_to_string from termcolor import colored SEPARATOR", "be \"boolean named arguments\") e.g.: run_ember_command('generate', 'route', 'foobar', pod=True) becomes: ember generate route", "if not hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You must define an EMBER_TOOLKIT dict in settings')", "', '.join(missing_settings)) def run_ember_command(self, cmd_name, *args, **kwargs): 
'''Run the named ember in the", "in the unlikely case we pass None or False, just omit the kwarg", "config_source = render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path, 'w')", "objects that are specified by MODELS_TO_SYNC.''' for app_config in django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC'))", "import subprocess from django.apps import apps as django_apps from django.conf import settings from", "project by key, falling back on the default if there's no setting available.'''", "route foobar --pod ''' command = ['ember', cmd_name] + list(args) for key, value", "arguments\") e.g.: run_ember_command('generate', 'route', 'foobar', pod=True) becomes: ember generate route foobar --pod '''", "django.template.loader import render_to_string from termcolor import colored SEPARATOR = '---------------------------------------------------------------' # settings with", "file with support for backend \"autoconfiguration\" at the given path.''' config_source = render_to_string(", "setting available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls): '''Return the full, absolute path", "subprocess from django.apps import apps as django_apps from django.conf import settings from django.core.management.base", "\"autoconfiguration\" at the given path.''' config_source = render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path =", "get_sync_model_set(cls): '''Return a set containing the actual Model class objects that are specified", "run_ember_command(self, cmd_name, *args, **kwargs): '''Run the named ember in the project's FULL_EMBER_PATH. 
Any", "join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path, 'w') as config_file: config_file.write(config_source) def get_sync_model_set(cls): '''Return a set", "def get_setting(cls, key): '''Get a setting from the user's project by key, falling", "= { 'EMBER_APP_NAME': None, 'API_PATH': None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None } class EmberCommand(BaseCommand):", "for backend \"autoconfiguration\" at the given path.''' config_source = render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')})", "args and kwargs will be converted into positional and named arguments respectively (booleans", "+ key) if value is not True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command,", "= set() for Model in app_config.get_models(): key = Model._meta.app_label + '.' + Model.__name__", "Any args and kwargs will be converted into positional and named arguments respectively", "import apps as django_apps from django.conf import settings from django.core.management.base import BaseCommand, CommandError", "some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls, *args): '''Raise a useful error if", "@classmethod def write_initial_config(cls): '''Generate an Ember config file with support for backend \"autoconfiguration\"", "*args, **kwargs): '''Run the named ember in the project's FULL_EMBER_PATH. 
Any args and", "Model class objects that are specified by MODELS_TO_SYNC.''' for app_config in django_apps.get_app_configs(): model_name_set", "class objects that are specified by MODELS_TO_SYNC.''' for app_config in django_apps.get_app_configs(): model_name_set =", "named arguments respectively (booleans are assumed to be \"boolean named arguments\") e.g.: run_ember_command('generate',", "named ember in the project's FULL_EMBER_PATH. Any args and kwargs will be converted", "of None are required DEFAULT_SETTINGS = { 'EMBER_APP_NAME': None, 'API_PATH': None, 'EMBER_APP_PATH': 'client',", "if any of args are not configured in settings.EMBER_TOOLKIT''' if not hasattr(settings, 'EMBER_TOOLKIT'):", "ember in the project's FULL_EMBER_PATH. Any args and kwargs will be converted into", "the given path.''' config_source = render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(), 'config/environment.js')", "config file with support for backend \"autoconfiguration\" at the given path.''' config_source =", "self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls, *args): '''Raise a useful error if any", "config_path = join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path, 'w') as config_file: config_file.write(config_source) def get_sync_model_set(cls): '''Return", "key in args: if cls.get_setting(key) is None: missing_settings.append(key) if missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT", "a setting from the user's project by key, falling back on the default", "'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None } class EmberCommand(BaseCommand): @classmethod def get_setting(cls, key): '''Get a", "any of args are not configured in settings.EMBER_TOOLKIT''' if not hasattr(settings, 'EMBER_TOOLKIT'): raise", "subprocess.check_call(command, cwd=self.get_full_ember_path()) 
@classmethod def write_initial_config(cls): '''Generate an Ember config file with support for", "CommandError( 'settings.EMBER_TOOLKIT is missing the following keys: ' + ', '.join(missing_settings)) def run_ember_command(self,", "dict in settings') missing_settings = [] for key in args: if cls.get_setting(key) is", "not hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You must define an EMBER_TOOLKIT dict in settings') missing_settings", "args are not configured in settings.EMBER_TOOLKIT''' if not hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You must", "from the user's project by key, falling back on the default if there's", "case we pass None or False, just omit the kwarg if value: command.append('--'", "= set(cls.get_setting('MODELS_TO_SYNC')) model_set = set() for Model in app_config.get_models(): key = Model._meta.app_label +", "not configured in settings.EMBER_TOOLKIT''' if not hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You must define an", "value is not True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod def", "following keys: ' + ', '.join(missing_settings)) def run_ember_command(self, cmd_name, *args, **kwargs): '''Run the", "project's Ember app.''' return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR)", "are assumed to be \"boolean named arguments\") e.g.: run_ember_command('generate', 'route', 'foobar', pod=True) becomes:", "kwargs: # in the unlikely case we pass None or False, just omit", "'''Return a set containing the actual Model class objects that are specified by", "or False, just omit the kwarg if value: command.append('--' + key) if value", "to be \"boolean named arguments\") e.g.: run_ember_command('generate', 'route', 'foobar', pod=True) 
becomes: ember generate", "{ 'EMBER_APP_NAME': None, 'API_PATH': None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None } class EmberCommand(BaseCommand): @classmethod", "raise CommandError( 'settings.EMBER_TOOLKIT is missing the following keys: ' + ', '.join(missing_settings)) def", "for key, value in kwargs: # in the unlikely case we pass None", "notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls, *args): '''Raise a useful error", "in args: if cls.get_setting(key) is None: missing_settings.append(key) if missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT is", "with open(config_path, 'w') as config_file: config_file.write(config_source) def get_sync_model_set(cls): '''Return a set containing the", "settings from django.core.management.base import BaseCommand, CommandError from django.template.loader import render_to_string from termcolor import", "given path.''' config_source = render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(), 'config/environment.js') with", "= '---------------------------------------------------------------' # settings with a default of None are required DEFAULT_SETTINGS =", "'''Raise a useful error if any of args are not configured in settings.EMBER_TOOLKIT'''", "= [] for key in args: if cls.get_setting(key) is None: missing_settings.append(key) if missing_settings:", "config_file: config_file.write(config_source) def get_sync_model_set(cls): '''Return a set containing the actual Model class objects", "for key in args: if cls.get_setting(key) is None: missing_settings.append(key) if missing_settings: raise CommandError(", "command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod def 
write_initial_config(cls): '''Generate an Ember", "missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT is missing the following keys: ' + ', '.join(missing_settings))", "from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.template.loader import render_to_string", "''' command = ['ember', cmd_name] + list(args) for key, value in kwargs: #", "django.apps import apps as django_apps from django.conf import settings from django.core.management.base import BaseCommand,", "MODELS_TO_SYNC.''' for app_config in django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set = set() for Model", "in settings') missing_settings = [] for key in args: if cls.get_setting(key) is None:", "Ember config file with support for backend \"autoconfiguration\" at the given path.''' config_source", "pod=True) becomes: ember generate route foobar --pod ''' command = ['ember', cmd_name] +", "required DEFAULT_SETTINGS = { 'EMBER_APP_NAME': None, 'API_PATH': None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None }", "user's project by key, falling back on the default if there's no setting", "key = Model._meta.app_label + '.' + Model.__name__ app_star = Model._meta.app_label + '.*' if", "are not configured in settings.EMBER_TOOLKIT''' if not hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You must define", "+ '.' 
+ Model.__name__ app_star = Model._meta.app_label + '.*' if key in model_name_set", "'''Get a setting from the user's project by key, falling back on the", "specified by MODELS_TO_SYNC.''' for app_config in django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set = set()", "*args): '''Raise a useful error if any of args are not configured in", "None or False, just omit the kwarg if value: command.append('--' + key) if", "CommandError from django.template.loader import render_to_string from termcolor import colored SEPARATOR = '---------------------------------------------------------------' #", "there's no setting available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls): '''Return the full,", "def write_initial_config(cls): '''Generate an Ember config file with support for backend \"autoconfiguration\" at", "missing_settings.append(key) if missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT is missing the following keys: ' +", "return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls): '''Return the full, absolute path to the", "of args are not configured in settings.EMBER_TOOLKIT''' if not hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You", "Ember app.''' return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod", "by MODELS_TO_SYNC.''' for app_config in django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set = set() for", "as config_file: config_file.write(config_source) def get_sync_model_set(cls): '''Return a set containing the actual Model class", "if there's no setting available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls): '''Return the", "def 
get_sync_model_set(cls): '''Return a set containing the actual Model class objects that are", "colored SEPARATOR = '---------------------------------------------------------------' # settings with a default of None are required", "return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls,", "are specified by MODELS_TO_SYNC.''' for app_config in django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set =", "import colored SEPARATOR = '---------------------------------------------------------------' # settings with a default of None are", "a default of None are required DEFAULT_SETTINGS = { 'EMBER_APP_NAME': None, 'API_PATH': None,", "'EMBER_TOOLKIT'): raise CommandError('You must define an EMBER_TOOLKIT dict in settings') missing_settings = []", "positional and named arguments respectively (booleans are assumed to be \"boolean named arguments\")", "settings with a default of None are required DEFAULT_SETTINGS = { 'EMBER_APP_NAME': None,", "the full, absolute path to the project's Ember app.''' return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH')))", "open(config_path, 'w') as config_file: config_file.write(config_source) def get_sync_model_set(cls): '''Return a set containing the actual", "backend \"autoconfiguration\" at the given path.''' config_source = render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path", "project's FULL_EMBER_PATH. 
Any args and kwargs will be converted into positional and named", "with support for backend \"autoconfiguration\" at the given path.''' config_source = render_to_string( 'django_ember_toolkit/environment.js',", "are required DEFAULT_SETTINGS = { 'EMBER_APP_NAME': None, 'API_PATH': None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None", "not True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod def write_initial_config(cls): '''Generate", "model_set = set() for Model in app_config.get_models(): key = Model._meta.app_label + '.' +", "'route', 'foobar', pod=True) becomes: ember generate route foobar --pod ''' command = ['ember',", "the default if there's no setting available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls):", "for app_config in django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set = set() for Model in", "'API_PATH': None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None } class EmberCommand(BaseCommand): @classmethod def get_setting(cls, key):", "{'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path, 'w') as config_file: config_file.write(config_source) def", "import BaseCommand, CommandError from django.template.loader import render_to_string from termcolor import colored SEPARATOR =", "back on the default if there's no setting available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod", "useful error if any of args are not configured in settings.EMBER_TOOLKIT''' if not", "cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls, *args): '''Raise a", "in 
app_config.get_models(): key = Model._meta.app_label + '.' + Model.__name__ app_star = Model._meta.app_label +", "into positional and named arguments respectively (booleans are assumed to be \"boolean named", "EmberCommand(BaseCommand): @classmethod def get_setting(cls, key): '''Get a setting from the user's project by", "the following keys: ' + ', '.join(missing_settings)) def run_ember_command(self, cmd_name, *args, **kwargs): '''Run", "from os.path import abspath, join import subprocess from django.apps import apps as django_apps", "abspath, join import subprocess from django.apps import apps as django_apps from django.conf import", "command.append('--' + key) if value is not True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command), 'green')))", "model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set = set() for Model in app_config.get_models(): key = Model._meta.app_label", "FULL_EMBER_PATH. Any args and kwargs will be converted into positional and named arguments", "the named ember in the project's FULL_EMBER_PATH. Any args and kwargs will be", "an EMBER_TOOLKIT dict in settings') missing_settings = [] for key in args: if", "default of None are required DEFAULT_SETTINGS = { 'EMBER_APP_NAME': None, 'API_PATH': None, 'EMBER_APP_PATH':", "respectively (booleans are assumed to be \"boolean named arguments\") e.g.: run_ember_command('generate', 'route', 'foobar',", "DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls): '''Return the full, absolute path to the project's Ember", "keys: ' + ', '.join(missing_settings)) def run_ember_command(self, cmd_name, *args, **kwargs): '''Run the named", "the actual Model class objects that are specified by MODELS_TO_SYNC.''' for app_config in", "Model._meta.app_label + '.' 
+ Model.__name__ app_star = Model._meta.app_label + '.*' if key in", "configured in settings.EMBER_TOOLKIT''' if not hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You must define an EMBER_TOOLKIT", "set() for Model in app_config.get_models(): key = Model._meta.app_label + '.' + Model.__name__ app_star", "support for backend \"autoconfiguration\" at the given path.''' config_source = render_to_string( 'django_ember_toolkit/environment.js', {'app_name':", "'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod def write_initial_config(cls): '''Generate an Ember config file with support", "True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod def write_initial_config(cls): '''Generate an", "assert_required_settings(cls, *args): '''Raise a useful error if any of args are not configured", "path.''' config_source = render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path,", "+ '.*' if key in model_name_set or app_star in model_name_set: model_set.add(Model) return model_set", "absolute path to the project's Ember app.''' return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self,", "'.' 
+ Model.__name__ app_star = Model._meta.app_label + '.*' if key in model_name_set or", "'config/environment.js') with open(config_path, 'w') as config_file: config_file.write(config_source) def get_sync_model_set(cls): '''Return a set containing", "None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None } class EmberCommand(BaseCommand): @classmethod def get_setting(cls, key): '''Get", "django_apps from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.template.loader import", "def run_ember_command(self, cmd_name, *args, **kwargs): '''Run the named ember in the project's FULL_EMBER_PATH.", "arguments respectively (booleans are assumed to be \"boolean named arguments\") e.g.: run_ember_command('generate', 'route',", "'foobar', pod=True) becomes: ember generate route foobar --pod ''' command = ['ember', cmd_name]", "named arguments\") e.g.: run_ember_command('generate', 'route', 'foobar', pod=True) becomes: ember generate route foobar --pod", "= Model._meta.app_label + '.*' if key in model_name_set or app_star in model_name_set: model_set.add(Model)", "with a default of None are required DEFAULT_SETTINGS = { 'EMBER_APP_NAME': None, 'API_PATH':", "by key, falling back on the default if there's no setting available.''' return", "from django.core.management.base import BaseCommand, CommandError from django.template.loader import render_to_string from termcolor import colored", "SEPARATOR = '---------------------------------------------------------------' # settings with a default of None are required DEFAULT_SETTINGS", "that are specified by MODELS_TO_SYNC.''' for app_config in django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set", "= ['ember', cmd_name] + list(args) for key, value in kwargs: # in the", "converted into positional and named arguments respectively (booleans are assumed to be \"boolean", "a useful error if any of args are not configured in 
settings.EMBER_TOOLKIT''' if", "app_config.get_models(): key = Model._meta.app_label + '.' + Model.__name__ app_star = Model._meta.app_label + '.*'", "from django.template.loader import render_to_string from termcolor import colored SEPARATOR = '---------------------------------------------------------------' # settings", "is None: missing_settings.append(key) if missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT is missing the following keys:", "ember generate route foobar --pod ''' command = ['ember', cmd_name] + list(args) for", "render_to_string from termcolor import colored SEPARATOR = '---------------------------------------------------------------' # settings with a default", "BaseCommand, CommandError from django.template.loader import render_to_string from termcolor import colored SEPARATOR = '---------------------------------------------------------------'", "termcolor import colored SEPARATOR = '---------------------------------------------------------------' # settings with a default of None", "the kwarg if value: command.append('--' + key) if value is not True: command.append(\"'{}'\".format(value))", "as django_apps from django.conf import settings from django.core.management.base import BaseCommand, CommandError from django.template.loader", "args: if cls.get_setting(key) is None: missing_settings.append(key) if missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT is missing", "the project's Ember app.''' return abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text)", "' + ', '.join(missing_settings)) def run_ember_command(self, cmd_name, *args, **kwargs): '''Run the named ember", "if missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT is missing the following keys: ' + ',", "hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You must define an EMBER_TOOLKIT dict in settings') missing_settings =", "will be converted 
into positional and named arguments respectively (booleans are assumed to", "cmd_name] + list(args) for key, value in kwargs: # in the unlikely case", "from django.apps import apps as django_apps from django.conf import settings from django.core.management.base import", "available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def get_full_ember_path(cls): '''Return the full, absolute path to", "None } class EmberCommand(BaseCommand): @classmethod def get_setting(cls, key): '''Get a setting from the", "setting from the user's project by key, falling back on the default if", "'''Return the full, absolute path to the project's Ember app.''' return abspath(join( settings.BASE_DIR,", "CommandError('You must define an EMBER_TOOLKIT dict in settings') missing_settings = [] for key", "falling back on the default if there's no setting available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key])", "an Ember config file with support for backend \"autoconfiguration\" at the given path.'''", "def assert_required_settings(cls, *args): '''Raise a useful error if any of args are not", "if cls.get_setting(key) is None: missing_settings.append(key) if missing_settings: raise CommandError( 'settings.EMBER_TOOLKIT is missing the", "--pod ''' command = ['ember', cmd_name] + list(args) for key, value in kwargs:", "= render_to_string( 'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path, 'w') as", "key): '''Get a setting from the user's project by key, falling back on", "Model in app_config.get_models(): key = Model._meta.app_label + '.' 
+ Model.__name__ app_star = Model._meta.app_label", "{}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod def write_initial_config(cls): '''Generate an Ember config file", "in django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set = set() for Model in app_config.get_models(): key", "list(args) for key, value in kwargs: # in the unlikely case we pass", "define an EMBER_TOOLKIT dict in settings') missing_settings = [] for key in args:", "apps as django_apps from django.conf import settings from django.core.management.base import BaseCommand, CommandError from", "abspath(join( settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls, *args):", "if value: command.append('--' + key) if value is not True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored('", "kwarg if value: command.append('--' + key) if value is not True: command.append(\"'{}'\".format(value)) self.notify('Running", "\"boolean named arguments\") e.g.: run_ember_command('generate', 'route', 'foobar', pod=True) becomes: ember generate route foobar", "EMBER_TOOLKIT dict in settings') missing_settings = [] for key in args: if cls.get_setting(key)", "key) if value is not True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path())", "set(cls.get_setting('MODELS_TO_SYNC')) model_set = set() for Model in app_config.get_models(): key = Model._meta.app_label + '.'", "None, 'API_PATH': None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC': None } class EmberCommand(BaseCommand): @classmethod def get_setting(cls,", "django.conf import settings from django.core.management.base import BaseCommand, 
CommandError from django.template.loader import render_to_string from", "'.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod def write_initial_config(cls): '''Generate an Ember config file with", "None are required DEFAULT_SETTINGS = { 'EMBER_APP_NAME': None, 'API_PATH': None, 'EMBER_APP_PATH': 'client', 'MODELS_TO_SYNC':", "'django_ember_toolkit/environment.js', {'app_name': cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path, 'w') as config_file: config_file.write(config_source)", "config_file.write(config_source) def get_sync_model_set(cls): '''Return a set containing the actual Model class objects that", "generate route foobar --pod ''' command = ['ember', cmd_name] + list(args) for key,", "and named arguments respectively (booleans are assumed to be \"boolean named arguments\") e.g.:", "raise CommandError('You must define an EMBER_TOOLKIT dict in settings') missing_settings = [] for", "set containing the actual Model class objects that are specified by MODELS_TO_SYNC.''' for", "# settings with a default of None are required DEFAULT_SETTINGS = { 'EMBER_APP_NAME':", "+ ', '.join(missing_settings)) def run_ember_command(self, cmd_name, *args, **kwargs): '''Run the named ember in", "'''Generate an Ember config file with support for backend \"autoconfiguration\" at the given", "on the default if there's no setting available.''' return settings.EMBER_TOOLKIT.get(key, DEFAULT_SETTINGS[key]) @classmethod def", "if value is not True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod", "write_initial_config(cls): '''Generate an Ember config file with support for backend \"autoconfiguration\" at the", "the unlikely case we pass None or False, just omit the kwarg if", "settings.EMBER_TOOLKIT''' if not 
hasattr(settings, 'EMBER_TOOLKIT'): raise CommandError('You must define an EMBER_TOOLKIT dict in", "in kwargs: # in the unlikely case we pass None or False, just", "the user's project by key, falling back on the default if there's no", "run_ember_command('generate', 'route', 'foobar', pod=True) becomes: ember generate route foobar --pod ''' command =", "cwd=self.get_full_ember_path()) @classmethod def write_initial_config(cls): '''Generate an Ember config file with support for backend", "key, falling back on the default if there's no setting available.''' return settings.EMBER_TOOLKIT.get(key,", "app_star = Model._meta.app_label + '.*' if key in model_name_set or app_star in model_name_set:", "**kwargs): '''Run the named ember in the project's FULL_EMBER_PATH. Any args and kwargs", "+ list(args) for key, value in kwargs: # in the unlikely case we", "cmd_name, *args, **kwargs): '''Run the named ember in the project's FULL_EMBER_PATH. Any args", "# in the unlikely case we pass None or False, just omit the", "'---------------------------------------------------------------' # settings with a default of None are required DEFAULT_SETTINGS = {", "foobar --pod ''' command = ['ember', cmd_name] + list(args) for key, value in", "= Model._meta.app_label + '.' + Model.__name__ app_star = Model._meta.app_label + '.*' if key", "def notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls, *args): '''Raise a useful", "'MODELS_TO_SYNC': None } class EmberCommand(BaseCommand): @classmethod def get_setting(cls, key): '''Get a setting from", "kwargs will be converted into positional and named arguments respectively (booleans are assumed", "def get_full_ember_path(cls): '''Return the full, absolute path to the project's Ember app.''' return", "the project's FULL_EMBER_PATH. 
Any args and kwargs will be converted into positional and", "app_config in django_apps.get_app_configs(): model_name_set = set(cls.get_setting('MODELS_TO_SYNC')) model_set = set() for Model in app_config.get_models():", "self.notify('Running {}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod def write_initial_config(cls): '''Generate an Ember config", "missing the following keys: ' + ', '.join(missing_settings)) def run_ember_command(self, cmd_name, *args, **kwargs):", "is not True: command.append(\"'{}'\".format(value)) self.notify('Running {}...'.format(colored(' '.join(command), 'green'))) subprocess.check_call(command, cwd=self.get_full_ember_path()) @classmethod def write_initial_config(cls):", "get_setting(cls, key): '''Get a setting from the user's project by key, falling back", "@classmethod def get_full_ember_path(cls): '''Return the full, absolute path to the project's Ember app.'''", "settings.BASE_DIR, cls.get_setting('EMBER_APP_PATH'))) def notify(self, some_text): self.stdout.write(SEPARATOR) self.stdout.write(some_text) self.stdout.write(SEPARATOR) @classmethod def assert_required_settings(cls, *args): '''Raise", "cls.get_setting('EMBER_APP_NAME')}) config_path = join(cls.get_full_ember_path(), 'config/environment.js') with open(config_path, 'w') as config_file: config_file.write(config_source) def get_sync_model_set(cls):", "just omit the kwarg if value: command.append('--' + key) if value is not", "['ember', cmd_name] + list(args) for key, value in kwargs: # in the unlikely", "in the project's FULL_EMBER_PATH. Any args and kwargs will be converted into positional", "missing_settings = [] for key in args: if cls.get_setting(key) is None: missing_settings.append(key) if" ]
[ "FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers import ImageChooserPanel from wagtail.core.models import Page from wagtail.api import", "banner image\", ) content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ],", "StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels = [", "\"\"\"Page models.\"\"\" from django.db import models from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers", "APIField(\"title\"), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Flex Page\" verbose_name_plural =", "classname=\"full title\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"content\"), ] class Meta: \"\"\"Meta", "import StreamField from .streamfields import ContentBlock, ImageGalleryBlock, CallToActionBlock class HomePage(Page): \"\"\"A home page", "] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Home Page\" verbose_name_plural = \"Home Pages\"", "ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")),", "CallToActionBlock()), ], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'), ] api_fields", "= \"cms/pages/flex_page.html\" subpage_types = [] content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock',", "from django.db import models from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers import 
ImageChooserPanel", "django.db import models from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers import ImageChooserPanel from", "ContentBlock, ImageGalleryBlock, CallToActionBlock class HomePage(Page): \"\"\"A home page class.\"\"\" template = \"cms/pages/home_page.html\" subpage_types", "on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional banner image\", ) content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock',", "template = \"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage'] banner_subtitle = models.CharField( max_length=50, blank=True, null=True, help_text=\"An", "help_text=\"An optional banner image\", ) content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock',", "null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields", "from wagtail.images.api.fields import ImageRenditionField from wagtail.core.fields import StreamField from .streamfields import ContentBlock, ImageGalleryBlock,", "\"Home Page\" verbose_name_plural = \"Home Pages\" class FlexPage(Page): \"\"\"A Flexible page class. Used", "subpage_types = [] content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ],", "blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"),", "[ FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"content\"), ] class", "verbose_name_plural = \"Home Pages\" class FlexPage(Page): \"\"\"A Flexible page class. 
Used for generic", "blank=True, null=True, help_text=\"An optional banner subtitle\" ) banner_image = models.ForeignKey( \"wagtailimages.Image\", null=True, blank=False,", "blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional banner image\", ) content = StreamField([ ('ContentBlock', ContentBlock()),", "APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Flex Page\" verbose_name_plural = \"Flex", "verbose_name = \"Home Page\" verbose_name_plural = \"Home Pages\" class FlexPage(Page): \"\"\"A Flexible page", "help_text=\"An optional banner subtitle\" ) banner_image = models.ForeignKey( \"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\",", "coding: utf-8 -*- \"\"\"Page models.\"\"\" from django.db import models from wagtail.admin.edit_handlers import FieldPanel,", "APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Home Page\"", "wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers import ImageChooserPanel from wagtail.core.models import Page from", "banner_subtitle = models.CharField( max_length=50, blank=True, null=True, help_text=\"An optional banner subtitle\" ) banner_image =", "= models.ForeignKey( \"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional banner image\", ) content", "StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name", "CallToActionBlock()), ], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'),", "null=True, help_text=\"An optional banner subtitle\" ) 
banner_image = models.ForeignKey( \"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL,", "APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name", "api_fields = [ APIField(\"title\"), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Flex", "Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Home Page\" verbose_name_plural = \"Home Pages\" class FlexPage(Page):", ".streamfields import ContentBlock, ImageGalleryBlock, CallToActionBlock class HomePage(Page): \"\"\"A home page class.\"\"\" template =", "a true purpose.\"\"\" template = \"cms/pages/flex_page.html\" subpage_types = [] content = StreamField([ ('ContentBlock',", "ImageGalleryBlock, CallToActionBlock class HomePage(Page): \"\"\"A home page class.\"\"\" template = \"cms/pages/home_page.html\" subpage_types =", "related_name=\"+\", help_text=\"An optional banner image\", ) content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()),", "\"\"\"Meta information.\"\"\" verbose_name = \"Home Page\" verbose_name_plural = \"Home Pages\" class FlexPage(Page): \"\"\"A", "('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels = [ FieldPanel(\"title\",", "ImageRenditionField from wagtail.core.fields import StreamField from .streamfields import ContentBlock, ImageGalleryBlock, CallToActionBlock class HomePage(Page):", "class.\"\"\" template = \"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage'] banner_subtitle = models.CharField( max_length=50, blank=True, null=True,", "title\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"content\"), ] class Meta: 
\"\"\"Meta information.\"\"\"", "\"Home Pages\" class FlexPage(Page): \"\"\"A Flexible page class. Used for generic pages that", "# -*- coding: utf-8 -*- \"\"\"Page models.\"\"\" from django.db import models from wagtail.admin.edit_handlers", "wagtail.images.api.fields import ImageRenditionField from wagtail.core.fields import StreamField from .streamfields import ContentBlock, ImageGalleryBlock, CallToActionBlock", "= [ FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"),", "source=\"banner_image\")), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Home Page\" verbose_name_plural =", "], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'), ] api_fields =", "HomePage(Page): \"\"\"A home page class.\"\"\" template = \"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage'] banner_subtitle =", "= [ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"), ] class Meta: \"\"\"Meta", "import APIField from wagtail.images.api.fields import ImageRenditionField from wagtail.core.fields import StreamField from .streamfields import", "optional banner image\", ) content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()),", "class. 
Used for generic pages that don't have a true purpose.\"\"\" template =", "= \"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage'] banner_subtitle = models.CharField( max_length=50, blank=True, null=True, help_text=\"An optional", "blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields =", "import models from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers import ImageChooserPanel from wagtail.core.models", "Flexible page class. Used for generic pages that don't have a true purpose.\"\"\"", "template = \"cms/pages/flex_page.html\" subpage_types = [] content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()),", "class FlexPage(Page): \"\"\"A Flexible page class. Used for generic pages that don't have", "] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Flex Page\" verbose_name_plural = \"Flex Pages\"", "[] content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True)", "import ImageChooserPanel from wagtail.core.models import Page from wagtail.api import APIField from wagtail.images.api.fields import", "\"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage'] banner_subtitle = models.CharField( max_length=50, blank=True, null=True, help_text=\"An optional banner", "= \"Home Page\" verbose_name_plural = \"Home Pages\" class FlexPage(Page): \"\"\"A Flexible page class.", "FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"),", "information.\"\"\" verbose_name = \"Home Page\" verbose_name_plural = \"Home Pages\" class 
FlexPage(Page): \"\"\"A Flexible", "utf-8 -*- \"\"\"Page models.\"\"\" from django.db import models from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel", "('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"),", "home page class.\"\"\" template = \"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage'] banner_subtitle = models.CharField( max_length=50,", "api_fields = [ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"), ] class Meta:", "wagtail.images.edit_handlers import ImageChooserPanel from wagtail.core.models import Page from wagtail.api import APIField from wagtail.images.api.fields", "[ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\"", "purpose.\"\"\" template = \"cms/pages/flex_page.html\" subpage_types = [] content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock',", "Used for generic pages that don't have a true purpose.\"\"\" template = \"cms/pages/flex_page.html\"", "max_length=50, blank=True, null=True, help_text=\"An optional banner subtitle\" ) banner_image = models.ForeignKey( \"wagtailimages.Image\", null=True,", "ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'),", "from .streamfields import ContentBlock, ImageGalleryBlock, CallToActionBlock class HomePage(Page): \"\"\"A home page class.\"\"\" template", "optional banner subtitle\" ) banner_image = models.ForeignKey( 
\"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An", "wagtail.core.fields import StreamField from .streamfields import ContentBlock, ImageGalleryBlock, CallToActionBlock class HomePage(Page): \"\"\"A home", "\"\"\"A home page class.\"\"\" template = \"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage'] banner_subtitle = models.CharField(", "APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Home Page\" verbose_name_plural = \"Home", "APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name =", "Page from wagtail.api import APIField from wagtail.images.api.fields import ImageRenditionField from wagtail.core.fields import StreamField", ") content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True)", "from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers import ImageChooserPanel from wagtail.core.models import Page", ") banner_image = models.ForeignKey( \"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional banner image\",", "\"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional banner image\", ) content = StreamField([", "banner_image = models.ForeignKey( \"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional banner image\", )", "FlexPage(Page): \"\"\"A Flexible page class. Used for generic pages that don't have a", "page class. 
Used for generic pages that don't have a true purpose.\"\"\" template", "import ContentBlock, ImageGalleryBlock, CallToActionBlock class HomePage(Page): \"\"\"A home page class.\"\"\" template = \"cms/pages/home_page.html\"", "that don't have a true purpose.\"\"\" template = \"cms/pages/flex_page.html\" subpage_types = [] content", "APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Home", "from wagtail.api import APIField from wagtail.images.api.fields import ImageRenditionField from wagtail.core.fields import StreamField from", "= StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels =", "classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\",", "import FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers import ImageChooserPanel from wagtail.core.models import Page from wagtail.api", "-*- coding: utf-8 -*- \"\"\"Page models.\"\"\" from django.db import models from wagtail.admin.edit_handlers import", "APIField from wagtail.images.api.fields import ImageRenditionField from wagtail.core.fields import StreamField from .streamfields import ContentBlock,", "image\", ) content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True,", "subpage_types = ['pages.FlexPage'] banner_subtitle = models.CharField( max_length=50, blank=True, null=True, help_text=\"An optional banner subtitle\"", "serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), 
APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Home Page\" verbose_name_plural", "content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels", "class HomePage(Page): \"\"\"A home page class.\"\"\" template = \"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage'] banner_subtitle", "] api_fields = [ APIField(\"title\"), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name =", "null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'), ] api_fields = [", "['pages.FlexPage'] banner_subtitle = models.CharField( max_length=50, blank=True, null=True, help_text=\"An optional banner subtitle\" ) banner_image", "import ImageRenditionField from wagtail.core.fields import StreamField from .streamfields import ContentBlock, ImageGalleryBlock, CallToActionBlock class", "Pages\" class FlexPage(Page): \"\"\"A Flexible page class. 
Used for generic pages that don't", "ImageChooserPanel from wagtail.core.models import Page from wagtail.api import APIField from wagtail.images.api.fields import ImageRenditionField", "('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'), ]", "content_panels = [ FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"content\"),", "for generic pages that don't have a true purpose.\"\"\" template = \"cms/pages/flex_page.html\" subpage_types", "FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"),", "('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"),", "don't have a true purpose.\"\"\" template = \"cms/pages/flex_page.html\" subpage_types = [] content =", "null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional banner image\", ) content = StreamField([ ('ContentBlock',", "[ FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"banner_subtitle\"),", "StreamFieldPanel from wagtail.images.edit_handlers import ImageChooserPanel from wagtail.core.models import Page from wagtail.api import APIField", "models from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers import ImageChooserPanel from wagtail.core.models import", "models.CharField( max_length=50, blank=True, null=True, 
help_text=\"An optional banner subtitle\" ) banner_image = models.ForeignKey( \"wagtailimages.Image\",", "= [ APIField(\"title\"), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Flex Page\"", "have a true purpose.\"\"\" template = \"cms/pages/flex_page.html\" subpage_types = [] content = StreamField([", "<reponame>hyshka/wagtail-vue-talk # -*- coding: utf-8 -*- \"\"\"Page models.\"\"\" from django.db import models from", "StreamField from .streamfields import ContentBlock, ImageGalleryBlock, CallToActionBlock class HomePage(Page): \"\"\"A home page class.\"\"\"", "from wagtail.images.edit_handlers import ImageChooserPanel from wagtail.core.models import Page from wagtail.api import APIField from", "wagtail.core.models import Page from wagtail.api import APIField from wagtail.images.api.fields import ImageRenditionField from wagtail.core.fields", "= \"Home Pages\" class FlexPage(Page): \"\"\"A Flexible page class. Used for generic pages", "class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Home Page\" verbose_name_plural = \"Home Pages\" class", "ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full", "= [] content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True,", "banner subtitle\" ) banner_image = models.ForeignKey( \"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional", "Page\" verbose_name_plural = \"Home Pages\" class FlexPage(Page): \"\"\"A Flexible page class. 
Used for", "= models.CharField( max_length=50, blank=True, null=True, help_text=\"An optional banner subtitle\" ) banner_image = models.ForeignKey(", "StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"), ]", "ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()), ], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"),", "import Page from wagtail.api import APIField from wagtail.images.api.fields import ImageRenditionField from wagtail.core.fields import", "CallToActionBlock class HomePage(Page): \"\"\"A home page class.\"\"\" template = \"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage']", "models.\"\"\" from django.db import models from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel from wagtail.images.edit_handlers import", "generic pages that don't have a true purpose.\"\"\" template = \"cms/pages/flex_page.html\" subpage_types =", "-*- \"\"\"Page models.\"\"\" from django.db import models from wagtail.admin.edit_handlers import FieldPanel, StreamFieldPanel from", "models.ForeignKey( \"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional banner image\", ) content =", "] api_fields = [ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\", source=\"banner_image\")), APIField(\"content\"), ] class", "= [ FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"content\"), ]", "[ APIField(\"title\"), APIField(\"content\"), ] class Meta: \"\"\"Meta information.\"\"\" verbose_name = \"Flex Page\" 
verbose_name_plural", "pages that don't have a true purpose.\"\"\" template = \"cms/pages/flex_page.html\" subpage_types = []", "true purpose.\"\"\" template = \"cms/pages/flex_page.html\" subpage_types = [] content = StreamField([ ('ContentBlock', ContentBlock()),", "= ['pages.FlexPage'] banner_subtitle = models.CharField( max_length=50, blank=True, null=True, help_text=\"An optional banner subtitle\" )", "], null=True, blank=True) content_panels = [ FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ]", "wagtail.api import APIField from wagtail.images.api.fields import ImageRenditionField from wagtail.core.fields import StreamField from .streamfields", "\"cms/pages/flex_page.html\" subpage_types = [] content = StreamField([ ('ContentBlock', ContentBlock()), ('ImageGalleryBlock', ImageGalleryBlock()), ('CallToActionBlock', CallToActionBlock()),", "\"\"\"A Flexible page class. Used for generic pages that don't have a true", "from wagtail.core.models import Page from wagtail.api import APIField from wagtail.images.api.fields import ImageRenditionField from", "subtitle\" ) banner_image = models.ForeignKey( \"wagtailimages.Image\", null=True, blank=False, on_delete=models.SET_NULL, related_name=\"+\", help_text=\"An optional banner", "FieldPanel(\"title\", classname=\"full title\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"content\"), ] class Meta:", "content_panels = [ FieldPanel(\"title\", classname=\"full title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields = [", "title\"), ImageChooserPanel(\"banner_image\"), FieldPanel(\"banner_subtitle\"), StreamFieldPanel('content'), ] api_fields = [ APIField(\"title\"), APIField(\"banner_subtitle\"), APIField(\"banner_image\"), APIField(\"banner_image_thumbnail\", serializer=ImageRenditionField(\"fill-100x100\",", "from 
wagtail.core.fields import StreamField from .streamfields import ContentBlock, ImageGalleryBlock, CallToActionBlock class HomePage(Page): \"\"\"A", "page class.\"\"\" template = \"cms/pages/home_page.html\" subpage_types = ['pages.FlexPage'] banner_subtitle = models.CharField( max_length=50, blank=True," ]
[ "= None operation = None def __init__(self, num1=None, num2=None, op=None): self.operand1 = float(num1)", "self.local_server.register_instance(class_instance) def register_function(self, function): self.local_server.register_function(function) def run(self): self.local_server.serve_forever() class ClientThread(threading.Thread): def __init__(self, address):", "import threading import socketserver class MessageHandler(object): body = None def __init__(self, message): self.body", "= None def __init__(self, message): self.body = message.decode(\"utf-8\") def message_loads(self): if self.body: result", "from xmlrpc import client from xmlrpc.server import SimpleXMLRPCServer from xmlrpc.server import SimpleXMLRPCRequestHandler import", "try: self.operand1 = float(self.operand1) self.operand2 = float(self.operand2) except ValueError: print(\"Not be numbers\") return", "from xmlrpc.server import SimpleXMLRPCRequestHandler import threading import socketserver class MessageHandler(object): body = None", "MessageHandler(object): body = None def __init__(self, message): self.body = message.decode(\"utf-8\") def message_loads(self): if", "= None operand2 = None operation = None def __init__(self, num1=None, num2=None, op=None):", "self.body.split(\"|\") return result class MessageBuilder(object): operand1 = None operand2 = None operation =", "self.operand2 def message_builder(self): if self.operand1 and self.operand2 and self.operation: result = str(self.operand1) +", "\"|\" + str(self.operand2) return result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass class ServerThread(threading.Thread): def __init__(self,", "class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass class ServerThread(threading.Thread): def __init__(self, address, port): threading.Thread.__init__(self) self.local_server =", "SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self, class_instance): 
self.local_server.register_instance(class_instance) def register_function(self, function): self.local_server.register_function(function) def run(self): self.local_server.serve_forever()", "class_instance): self.local_server.register_instance(class_instance) def register_function(self, function): self.local_server.register_function(function) def run(self): self.local_server.serve_forever() class ClientThread(threading.Thread): def __init__(self,", "get_client(self): return self.local_client def call_function(self, arg1, arg2): return self.local_client.function(arg1, arg2) def run(self): pass", "self.body: result = self.body.split(\"|\") return result class MessageBuilder(object): operand1 = None operand2 =", "= float(num2) self.operation = op def get_operands(self): try: self.operand1 = float(self.operand1) self.operand2 =", "self.operand1, self.operand2 def message_builder(self): if self.operand1 and self.operand2 and self.operation: result = str(self.operand1)", "operation = None def __init__(self, num1=None, num2=None, op=None): self.operand1 = float(num1) self.operand2 =", "None def __init__(self, num1=None, num2=None, op=None): self.operand1 = float(num1) self.operand2 = float(num2) self.operation", "def run(self): self.local_server.serve_forever() class ClientThread(threading.Thread): def __init__(self, address): threading.Thread.__init__(self) self.local_client = client.ServerProxy(address) def", "self.local_server.serve_forever() class ClientThread(threading.Thread): def __init__(self, address): threading.Thread.__init__(self) self.local_client = client.ServerProxy(address) def get_client(self): return", "num2=None, op=None): self.operand1 = float(num1) self.operand2 = float(num2) self.operation = op def get_operands(self):", "None operand2 = None operation = None def __init__(self, num1=None, num2=None, op=None): self.operand1", "import client from xmlrpc.server import SimpleXMLRPCServer from xmlrpc.server import SimpleXMLRPCRequestHandler import threading 
import", "operand2 = None operation = None def __init__(self, num1=None, num2=None, op=None): self.operand1 =", "<gh_stars>0 from xmlrpc import client from xmlrpc.server import SimpleXMLRPCServer from xmlrpc.server import SimpleXMLRPCRequestHandler", "self.operand2 = float(num2) self.operation = op def get_operands(self): try: self.operand1 = float(self.operand1) self.operand2", "print(\"Not be numbers\") return self.operand1, self.operand2 def message_builder(self): if self.operand1 and self.operand2 and", "ClientThread(threading.Thread): def __init__(self, address): threading.Thread.__init__(self) self.local_client = client.ServerProxy(address) def get_client(self): return self.local_client def", "self.local_client = client.ServerProxy(address) def get_client(self): return self.local_client def call_function(self, arg1, arg2): return self.local_client.function(arg1,", "+ \"|\" + str(self.operand2) return result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass class ServerThread(threading.Thread): def", "function): self.local_server.register_function(function) def run(self): self.local_server.serve_forever() class ClientThread(threading.Thread): def __init__(self, address): threading.Thread.__init__(self) self.local_client =", "num1=None, num2=None, op=None): self.operand1 = float(num1) self.operand2 = float(num2) self.operation = op def", "numbers\") return self.operand1, self.operand2 def message_builder(self): if self.operand1 and self.operand2 and self.operation: result", "= client.ServerProxy(address) def get_client(self): return self.local_client def call_function(self, arg1, arg2): return self.local_client.function(arg1, arg2)", "body = None def __init__(self, message): self.body = message.decode(\"utf-8\") def message_loads(self): if self.body:", "return result class MessageBuilder(object): operand1 = None operand2 = None operation = None", "result = str(self.operand1) + \"|\" + str(self.operation) + \"|\" + 
str(self.operand2) return result", "float(self.operand1) self.operand2 = float(self.operand2) except ValueError: print(\"Not be numbers\") return self.operand1, self.operand2 def", "float(num1) self.operand2 = float(num2) self.operation = op def get_operands(self): try: self.operand1 = float(self.operand1)", "xmlrpc.server import SimpleXMLRPCServer from xmlrpc.server import SimpleXMLRPCRequestHandler import threading import socketserver class MessageHandler(object):", "self.operand1 = float(num1) self.operand2 = float(num2) self.operation = op def get_operands(self): try: self.operand1", "class ClientThread(threading.Thread): def __init__(self, address): threading.Thread.__init__(self) self.local_client = client.ServerProxy(address) def get_client(self): return self.local_client", "threading.Thread.__init__(self) self.local_client = client.ServerProxy(address) def get_client(self): return self.local_client def call_function(self, arg1, arg2): return", "def __init__(self, address): threading.Thread.__init__(self) self.local_client = client.ServerProxy(address) def get_client(self): return self.local_client def call_function(self,", "threading import socketserver class MessageHandler(object): body = None def __init__(self, message): self.body =", "SimpleXMLRPCServer from xmlrpc.server import SimpleXMLRPCRequestHandler import threading import socketserver class MessageHandler(object): body =", "float(self.operand2) except ValueError: print(\"Not be numbers\") return self.operand1, self.operand2 def message_builder(self): if self.operand1", "None def __init__(self, message): self.body = message.decode(\"utf-8\") def message_loads(self): if self.body: result =", "__init__(self, num1=None, num2=None, op=None): self.operand1 = float(num1) self.operand2 = float(num2) self.operation = op", "None operation = None def __init__(self, num1=None, num2=None, op=None): self.operand1 = float(num1) self.operand2", "+ str(self.operation) + \"|\" + str(self.operand2) return result 
class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass class", "= str(self.operand1) + \"|\" + str(self.operation) + \"|\" + str(self.operand2) return result class", "return result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass class ServerThread(threading.Thread): def __init__(self, address, port): threading.Thread.__init__(self)", "self.operation = op def get_operands(self): try: self.operand1 = float(self.operand1) self.operand2 = float(self.operand2) except", "port)) def register_class_functions(self, class_instance): self.local_server.register_instance(class_instance) def register_function(self, function): self.local_server.register_function(function) def run(self): self.local_server.serve_forever() class", "message.decode(\"utf-8\") def message_loads(self): if self.body: result = self.body.split(\"|\") return result class MessageBuilder(object): operand1", "result = self.body.split(\"|\") return result class MessageBuilder(object): operand1 = None operand2 = None", "operand1 = None operand2 = None operation = None def __init__(self, num1=None, num2=None,", "register_function(self, function): self.local_server.register_function(function) def run(self): self.local_server.serve_forever() class ClientThread(threading.Thread): def __init__(self, address): threading.Thread.__init__(self) self.local_client", "= float(self.operand2) except ValueError: print(\"Not be numbers\") return self.operand1, self.operand2 def message_builder(self): if", "str(self.operand1) + \"|\" + str(self.operation) + \"|\" + str(self.operand2) return result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn,", "self.operand1 = float(self.operand1) self.operand2 = float(self.operand2) except ValueError: print(\"Not be numbers\") return self.operand1,", "and self.operation: result = str(self.operand1) + \"|\" + str(self.operation) + \"|\" + str(self.operand2)", "class MessageBuilder(object): operand1 = None 
operand2 = None operation = None def __init__(self,", "+ str(self.operand2) return result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass class ServerThread(threading.Thread): def __init__(self, address,", "def get_client(self): return self.local_client def call_function(self, arg1, arg2): return self.local_client.function(arg1, arg2) def run(self):", "client from xmlrpc.server import SimpleXMLRPCServer from xmlrpc.server import SimpleXMLRPCRequestHandler import threading import socketserver", "import SimpleXMLRPCServer from xmlrpc.server import SimpleXMLRPCRequestHandler import threading import socketserver class MessageHandler(object): body", "except ValueError: print(\"Not be numbers\") return self.operand1, self.operand2 def message_builder(self): if self.operand1 and", "return self.operand1, self.operand2 def message_builder(self): if self.operand1 and self.operand2 and self.operation: result =", "op=None): self.operand1 = float(num1) self.operand2 = float(num2) self.operation = op def get_operands(self): try:", "if self.body: result = self.body.split(\"|\") return result class MessageBuilder(object): operand1 = None operand2", "\"|\" + str(self.operation) + \"|\" + str(self.operand2) return result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass", "def __init__(self, address, port): threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self, class_instance): self.local_server.register_instance(class_instance)", "def register_class_functions(self, class_instance): self.local_server.register_instance(class_instance) def register_function(self, function): self.local_server.register_function(function) def run(self): self.local_server.serve_forever() class ClientThread(threading.Thread):", "and self.operand2 and self.operation: result = str(self.operand1) + \"|\" + str(self.operation) + \"|\"", "socketserver class 
MessageHandler(object): body = None def __init__(self, message): self.body = message.decode(\"utf-8\") def", "= message.decode(\"utf-8\") def message_loads(self): if self.body: result = self.body.split(\"|\") return result class MessageBuilder(object):", "__init__(self, address, port): threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self, class_instance): self.local_server.register_instance(class_instance) def", "get_operands(self): try: self.operand1 = float(self.operand1) self.operand2 = float(self.operand2) except ValueError: print(\"Not be numbers\")", "xmlrpc import client from xmlrpc.server import SimpleXMLRPCServer from xmlrpc.server import SimpleXMLRPCRequestHandler import threading", "threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self, class_instance): self.local_server.register_instance(class_instance) def register_function(self, function): self.local_server.register_function(function)", "import SimpleXMLRPCRequestHandler import threading import socketserver class MessageHandler(object): body = None def __init__(self,", "if self.operand1 and self.operand2 and self.operation: result = str(self.operand1) + \"|\" + str(self.operation)", "xmlrpc.server import SimpleXMLRPCRequestHandler import threading import socketserver class MessageHandler(object): body = None def", "+ \"|\" + str(self.operation) + \"|\" + str(self.operand2) return result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer):", "self.local_server.register_function(function) def run(self): self.local_server.serve_forever() class ClientThread(threading.Thread): def __init__(self, address): threading.Thread.__init__(self) self.local_client = client.ServerProxy(address)", "message_loads(self): if self.body: result = self.body.split(\"|\") return result class MessageBuilder(object): operand1 = None", "be 
numbers\") return self.operand1, self.operand2 def message_builder(self): if self.operand1 and self.operand2 and self.operation:", "def message_builder(self): if self.operand1 and self.operand2 and self.operation: result = str(self.operand1) + \"|\"", "= None def __init__(self, num1=None, num2=None, op=None): self.operand1 = float(num1) self.operand2 = float(num2)", "self.body = message.decode(\"utf-8\") def message_loads(self): if self.body: result = self.body.split(\"|\") return result class", "port): threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self, class_instance): self.local_server.register_instance(class_instance) def register_function(self, function):", "__init__(self, message): self.body = message.decode(\"utf-8\") def message_loads(self): if self.body: result = self.body.split(\"|\") return", "= self.body.split(\"|\") return result class MessageBuilder(object): operand1 = None operand2 = None operation", "self.operand2 and self.operation: result = str(self.operand1) + \"|\" + str(self.operation) + \"|\" +", "from xmlrpc.server import SimpleXMLRPCServer from xmlrpc.server import SimpleXMLRPCRequestHandler import threading import socketserver class", "def register_function(self, function): self.local_server.register_function(function) def run(self): self.local_server.serve_forever() class ClientThread(threading.Thread): def __init__(self, address): threading.Thread.__init__(self)", "self.operand2 = float(self.operand2) except ValueError: print(\"Not be numbers\") return self.operand1, self.operand2 def message_builder(self):", "result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass class ServerThread(threading.Thread): def __init__(self, address, port): threading.Thread.__init__(self) self.local_server", "str(self.operation) + \"|\" + str(self.operand2) return result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, 
SimpleXMLRPCServer): pass class ServerThread(threading.Thread):", "SimpleXMLRPCServer): pass class ServerThread(threading.Thread): def __init__(self, address, port): threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address, port))", "def __init__(self, message): self.body = message.decode(\"utf-8\") def message_loads(self): if self.body: result = self.body.split(\"|\")", "class MessageHandler(object): body = None def __init__(self, message): self.body = message.decode(\"utf-8\") def message_loads(self):", "address, port): threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self, class_instance): self.local_server.register_instance(class_instance) def register_function(self,", "import socketserver class MessageHandler(object): body = None def __init__(self, message): self.body = message.decode(\"utf-8\")", "SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass class ServerThread(threading.Thread): def __init__(self, address, port): threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address,", "register_class_functions(self, class_instance): self.local_server.register_instance(class_instance) def register_function(self, function): self.local_server.register_function(function) def run(self): self.local_server.serve_forever() class ClientThread(threading.Thread): def", "run(self): self.local_server.serve_forever() class ClientThread(threading.Thread): def __init__(self, address): threading.Thread.__init__(self) self.local_client = client.ServerProxy(address) def get_client(self):", "SimpleXMLRPCRequestHandler import threading import socketserver class MessageHandler(object): body = None def __init__(self, message):", "self.operation: result = str(self.operand1) + \"|\" + str(self.operation) + \"|\" + str(self.operand2) return", "= SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self, 
class_instance): self.local_server.register_instance(class_instance) def register_function(self, function): self.local_server.register_function(function) def run(self):", "message_builder(self): if self.operand1 and self.operand2 and self.operation: result = str(self.operand1) + \"|\" +", "class ServerThread(threading.Thread): def __init__(self, address, port): threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self,", "float(num2) self.operation = op def get_operands(self): try: self.operand1 = float(self.operand1) self.operand2 = float(self.operand2)", "ValueError: print(\"Not be numbers\") return self.operand1, self.operand2 def message_builder(self): if self.operand1 and self.operand2", "address): threading.Thread.__init__(self) self.local_client = client.ServerProxy(address) def get_client(self): return self.local_client def call_function(self, arg1, arg2):", "= float(self.operand1) self.operand2 = float(self.operand2) except ValueError: print(\"Not be numbers\") return self.operand1, self.operand2", "str(self.operand2) return result class SimpleThreadedXMLRPCServer(socketserver.ThreadingMixIn, SimpleXMLRPCServer): pass class ServerThread(threading.Thread): def __init__(self, address, port):", "= float(num1) self.operand2 = float(num2) self.operation = op def get_operands(self): try: self.operand1 =", "self.operand1 and self.operand2 and self.operation: result = str(self.operand1) + \"|\" + str(self.operation) +", "MessageBuilder(object): operand1 = None operand2 = None operation = None def __init__(self, num1=None,", "self.local_server = SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self, class_instance): self.local_server.register_instance(class_instance) def register_function(self, function): self.local_server.register_function(function) def", "def get_operands(self): try: self.operand1 = float(self.operand1) self.operand2 = float(self.operand2) except 
ValueError: print(\"Not be", "message): self.body = message.decode(\"utf-8\") def message_loads(self): if self.body: result = self.body.split(\"|\") return result", "pass class ServerThread(threading.Thread): def __init__(self, address, port): threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address, port)) def", "__init__(self, address): threading.Thread.__init__(self) self.local_client = client.ServerProxy(address) def get_client(self): return self.local_client def call_function(self, arg1,", "op def get_operands(self): try: self.operand1 = float(self.operand1) self.operand2 = float(self.operand2) except ValueError: print(\"Not", "def __init__(self, num1=None, num2=None, op=None): self.operand1 = float(num1) self.operand2 = float(num2) self.operation =", "client.ServerProxy(address) def get_client(self): return self.local_client def call_function(self, arg1, arg2): return self.local_client.function(arg1, arg2) def", "ServerThread(threading.Thread): def __init__(self, address, port): threading.Thread.__init__(self) self.local_server = SimpleThreadedXMLRPCServer((address, port)) def register_class_functions(self, class_instance):", "def message_loads(self): if self.body: result = self.body.split(\"|\") return result class MessageBuilder(object): operand1 =", "result class MessageBuilder(object): operand1 = None operand2 = None operation = None def", "= op def get_operands(self): try: self.operand1 = float(self.operand1) self.operand2 = float(self.operand2) except ValueError:" ]
[ "file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test invalid shebang file = open(os.path.join(self.tmpdir, testfilename), \"w\")", "function.\"\"\" # Test we get False if target is not in the given", "file is in the list, get a list with only the pod file", "\"test\" file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test invalid", "from quattordocbuild import sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test class for sourcehandler.\"\"\" def setUp(self): \"\"\"Set", "'pm', 'pl', 'pan']: testfile = \"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test invalid", "[])) # Test for wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test for a correct", "self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\" # Test valid extensions for extension", "Test valid extensions for extension in ['pod', 'pm', 'pl', 'pan']: testfile = \"test.%s\"", "given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test for False on empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', []))", "'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod' # Add a correct item to an empty list", "file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), [])", "extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test invalid extensions for extension in ['tpl', 'txt', 'xml']:", "[])) # Test for False on empty fileslist 
self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test for", "testfile)]) def test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def suite(self):", "Add a pod file when a pm file is in the list, get", "is_wanted_file function.\"\"\" # Test valid extensions for extension in ['pod', 'pm', 'pl', 'pan']:", "# Test invalid shebang file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename))", "# Test for wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test for a correct path", "correct item to an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm']) # Add a", "wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test for a correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def", "= open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test invalid shebang file", "['pod', 'pm', 'pl', 'pan']: testfile = \"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test", "back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile]) def test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\" # Test a", "valid shebang testfilename = \"test\" file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir,", "it fails on a empty dir 
self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test if it can", "self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test valid shebang testfilename = \"test\" file = open(os.path.join(self.tmpdir, testfilename),", "is in the list, get a list with only the pod file back", "open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\" #", "valid extensions for extension in ['pod', 'pm', 'pl', 'pan']: testfile = \"test.%s\" %", "False if target is not in the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test", "get False if target is not in the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) #", "dir.\"\"\" shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) # test", "0 file = open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self):", "testfile = 'test.pod' fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir, testfile), 'w')", "\"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test invalid shebang file = open(os.path.join(self.tmpdir, testfilename),", "testfilename)) def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\" # Test we get False if target", "if it can run a basic pom.xml and return 0 file = open(os.path.join(repoloc,", "in the list, get a list with only 
the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod',", "a pod file is in the list, get a list with only the", "a empty dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test if it can run a basic", "\"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\" #", "# Test for False on empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test for wrong", "self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test for False on empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test", "it can run a basic pom.xml and return 0 file = open(os.path.join(repoloc, \"pom.xml\"),", "an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm']) # Add a pod file when", "in the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test for False on empty fileslist", "[testpodfile]), [testpodfile]) def test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\" # Test a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir),", "self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test a correct dir testfile = 'test.pod' fulltestdir = os.path.join(self.tmpdir,", "file when a pm file is in the list, get a list with", "'test/doc/pod/test.pod' # Add a correct item to an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []),", "'txt', 'xml']: testfile = \"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test valid shebang", "from unittest import TestCase, main, TestLoader 
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa from quattordocbuild import", "correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self): \"\"\"Test handle_duplicates function.\"\"\" testperlfile = 'test/lib/perl/test.pm' testpodfile", "file = open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self): \"\"\"Test", "when a pm file is in the list, get a list with only", "mkdtemp() def tearDown(self): \"\"\"Remove temp dir.\"\"\" shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc =", "\"test\") os.makedirs(repoloc) # test if it fails on a empty dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0)", "self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile]) # Add a pm file when a pod file", "file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\" # Test we get", "for sourcehandler.\"\"\" def setUp(self): \"\"\"Set up temp dir for tests.\"\"\" self.tmpdir = mkdtemp()", "os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) # test if it fails on a empty dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc),", "self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test invalid shebang file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close()", "self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self): 
\"\"\"Test handle_duplicates function.\"\"\" testperlfile = 'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod'", "all the testcases in this module.\"\"\" return TestLoader().loadTestsFromTestCase(SourcehandlerTest) if __name__ == '__main__': main()", "# Test invalid extensions for extension in ['tpl', 'txt', 'xml']: testfile = \"test.%s\"", "maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) # test if it fails on a", "tearDown(self): \"\"\"Remove temp dir.\"\"\" shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir, \"test\")", "pod file is in the list, get a list with only the pod", "and return 0 file = open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0)", "a list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile]) #", "= \"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test valid shebang testfilename = \"test\"", "if target is not in the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test for", "[os.path.join(fulltestdir, testfile)]) def test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def", "for False on empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test for wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/',", "shutil from tempfile import mkdtemp from unittest import TestCase, main, TestLoader 
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib')))", "test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\" # Test valid extensions for extension in ['pod', 'pm',", "testperlfile, [testpodfile]), [testpodfile]) def test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\" # Test a bogus dir", "Test valid shebang testfilename = \"test\" file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close()", "# Test we get False if target is not in the given path", "= 'test/doc/pod/test.pod' # Add a correct item to an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile,", "testfilename)) # Test invalid shebang file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir,", "file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir,", "fails on a empty dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test if it can run", "import mkdtemp from unittest import TestCase, main, TestLoader sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa from", "pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile]) # Add a pm file when", "'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False),", "['schema.pan'])) # Test for a 
correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self): \"\"\"Test handle_duplicates", "= open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def test_get_source_files(self): \"\"\"Test get_source_files", "for wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test for a correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan']))", "class for sourcehandler.\"\"\" import os import sys import shutil from tempfile import mkdtemp", "handle_duplicates function.\"\"\" testperlfile = 'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod' # Add a correct item", "file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir", "a pm file is in the list, get a list with only the", "back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile]) # Add a pm file when a pod", "basic pom.xml and return 0 file = open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close()", "Test we get False if target is not in the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test',", "for a correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self): \"\"\"Test handle_duplicates function.\"\"\" testperlfile =", "a pm file when a pod file is in the list, get a", "when a pod file is in the list, get a list with only", "path 
self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test for False on empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) #", "= open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self): \"\"\"Test is_wanted_file", "# Add a pm file when a pod file is in the list,", "file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile]) def test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\" # Test", "open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test invalid shebang file =", "testpodfile, [testperlfile]), [testpodfile]) # Add a pm file when a pod file is", "in ['tpl', 'txt', 'xml']: testfile = \"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test", "up temp dir for tests.\"\"\" self.tmpdir = mkdtemp() def tearDown(self): \"\"\"Remove temp dir.\"\"\"", "list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile]) # Add", "class for sourcehandler.\"\"\" def setUp(self): \"\"\"Set up temp dir for tests.\"\"\" self.tmpdir =", "# noqa from quattordocbuild import sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test class for sourcehandler.\"\"\" def", "get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def suite(self): \"\"\"Return all the testcases", "Test for wrong subdir 
self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test for a correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod',", "import TestCase, main, TestLoader sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa from quattordocbuild import sourcehandler class", "the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile]) def test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\"", "bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test a correct dir testfile = 'test.pod' fulltestdir", "testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test invalid shebang file = open(os.path.join(self.tmpdir,", "is not in the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test for False on", "we get False if target is not in the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', []))", "only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile]) def test_list_source_files(self): \"\"\"Test list_source_files", "TestLoader sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa from quattordocbuild import sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test class", "self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test for a correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self): \"\"\"Test", "Test for a correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self): \"\"\"Test handle_duplicates function.\"\"\" testperlfile", "empty list 
self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm']) # Add a pod file when a", "def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\" # Test we get False if target is", "testfile), 'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir,", "def tearDown(self): \"\"\"Remove temp dir.\"\"\" shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir,", "pm file when a pod file is in the list, get a list", "# test if it fails on a empty dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test", "open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\"", "the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile]) # Add a pm file", "dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test a correct dir testfile = 'test.pod' fulltestdir =", "target is not in the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test for False", "import os import sys import shutil from tempfile import mkdtemp from unittest import", "unittest import TestCase, main, TestLoader sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa from quattordocbuild import sourcehandler", "temp dir for tests.\"\"\" self.tmpdir = mkdtemp() def tearDown(self): \"\"\"Remove temp 
dir.\"\"\" shutil.rmtree(self.tmpdir)", "def test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\" # Test a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) #", "testperlfile, []), ['test/lib/perl/test.pm']) # Add a pod file when a pm file is", "Test invalid shebang file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def", "fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test for wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test for", "self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True))", "a correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self): \"\"\"Test handle_duplicates function.\"\"\" testperlfile = 'test/lib/perl/test.pm'", "[testpodfile]) # Add a pm file when a pod file is in the", "'../lib'))) # noqa from quattordocbuild import sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test class for sourcehandler.\"\"\"", "invalid extensions for extension in ['tpl', 'txt', 'xml']: testfile = \"test.%s\" % extension", "[testpodfile]) def test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\" # Test a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), [])", "def test_handle_duplicates(self): \"\"\"Test handle_duplicates function.\"\"\" testperlfile = 'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod' # Add", "mkdtemp from unittest import TestCase, main, TestLoader 
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa from quattordocbuild", "def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) # test if it", "noqa from quattordocbuild import sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test class for sourcehandler.\"\"\" def setUp(self):", "extensions for extension in ['tpl', 'txt', 'xml']: testfile = \"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('',", "Test a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test a correct dir testfile =", "= \"test\" file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test", "quattordocbuild import sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test class for sourcehandler.\"\"\" def setUp(self): \"\"\"Set up", "def test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def suite(self): \"\"\"Return", "test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def suite(self): \"\"\"Return all", "testfile = \"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test invalid extensions for extension", "list, get a list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]),", "function.\"\"\" # Test valid extensions for extension in ['pod', 'pm', 'pl', 'pan']: testfile", "tests.\"\"\" self.tmpdir = mkdtemp() def tearDown(self): \"\"\"Remove temp dir.\"\"\" 
shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test", "pod file when a pm file is in the list, get a list", "fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir),", "repoloc = os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) # test if it fails on a empty", "Add a correct item to an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm']) #", "temp dir.\"\"\" shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) #", "to an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm']) # Add a pod file", "\"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test invalid extensions for extension in ['tpl',", "<reponame>jouvin/release \"\"\"Test class for sourcehandler.\"\"\" import os import sys import shutil from tempfile", "[]) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def suite(self): \"\"\"Return all the testcases in this module.\"\"\" return", "for extension in ['pod', 'pm', 'pl', 'pan']: testfile = \"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('',", "os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)])", "# Test valid extensions for extension in ['pod', 'pm', 'pl', 'pan']: testfile =", "testpodfile = 'test/doc/pod/test.pod' # Add a correct item to an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm',", "\"\"\"Test 
get_source_files function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def suite(self): \"\"\"Return all the", "sourcehandler.\"\"\" import os import sys import shutil from tempfile import mkdtemp from unittest", "the list, get a list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile,", "not in the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test for False on empty", "suite(self): \"\"\"Return all the testcases in this module.\"\"\" return TestLoader().loadTestsFromTestCase(SourcehandlerTest) if __name__ ==", "tempfile import mkdtemp from unittest import TestCase, main, TestLoader sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa", "os.makedirs(repoloc) # test if it fails on a empty dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) #", "= \"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test invalid extensions for extension in", "empty dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test if it can run a basic pom.xml", "shebang testfilename = \"test\" file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename))", "def setUp(self): \"\"\"Set up temp dir for tests.\"\"\" self.tmpdir = mkdtemp() def tearDown(self):", "0) def test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\" # Test valid extensions for extension in", "[]), ['test/lib/perl/test.pm']) # Add a pod file when a pm file is in", "# Add a correct item to an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm'])", "\"\"\"Return all the testcases in this module.\"\"\" return 
TestLoader().loadTestsFromTestCase(SourcehandlerTest) if __name__ == '__main__':", "the given path self.assertFalse(sourcehandler.is_wanted_dir('/bogusdir/test', [])) # Test for False on empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/',", "% extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test invalid extensions for extension in ['tpl', 'txt',", "shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) # test if", "testperlfile = 'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod' # Add a correct item to an", "# Test a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test a correct dir testfile", "for tests.\"\"\" self.tmpdir = mkdtemp() def tearDown(self): \"\"\"Remove temp dir.\"\"\" shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self):", "\"\"\"Test class for sourcehandler.\"\"\" import os import sys import shutil from tempfile import", "correct dir testfile = 'test.pod' fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir,", "import sys import shutil from tempfile import mkdtemp from unittest import TestCase, main,", "\"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\" # Test", "SourcehandlerTest(TestCase): \"\"\"Test class for sourcehandler.\"\"\" def setUp(self): \"\"\"Set up temp dir for tests.\"\"\"", "sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa from quattordocbuild import sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test class for", "dir testfile = 'test.pod' 
fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir, testfile),", "self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm']) # Add a pod file when a pm file", "file when a pod file is in the list, get a list with", "= 'test.pod' fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\")", "extension in ['tpl', 'txt', 'xml']: testfile = \"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('', testfile)) #", "Test for False on empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test for wrong subdir", "class SourcehandlerTest(TestCase): \"\"\"Test class for sourcehandler.\"\"\" def setUp(self): \"\"\"Set up temp dir for", "\"\"\"Test is_wanted_dir function.\"\"\" # Test we get False if target is not in", "# Add a pod file when a pm file is in the list,", "empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test for wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test", "list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile]) def test_list_source_files(self):", "True)) def suite(self): \"\"\"Return all the testcases in this module.\"\"\" return TestLoader().loadTestsFromTestCase(SourcehandlerTest) if", "function.\"\"\" self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def suite(self): \"\"\"Return all the testcases in", "= os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), 
[os.path.join(fulltestdir,", "\"\"\"Remove temp dir.\"\"\" shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc)", "sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test class for sourcehandler.\"\"\" def setUp(self): \"\"\"Set up temp dir", "= open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\"", "['test/lib/perl/test.pm']) # Add a pod file when a pm file is in the", "\"\"\"Test list_source_files function.\"\"\" # Test a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test a", "'pl', 'pan']: testfile = \"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test invalid extensions", "get a list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile])", "testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\" # Test", "\"\"\"Set up temp dir for tests.\"\"\" self.tmpdir = mkdtemp() def tearDown(self): \"\"\"Remove temp", "a basic pom.xml and return 0 file = open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>')", "\"\"\"Test class for sourcehandler.\"\"\" def setUp(self): \"\"\"Set up temp dir for tests.\"\"\" self.tmpdir", "pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile]) def test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\" 
#", "self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile]) def test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\" # Test a bogus", "open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def test_get_source_files(self): \"\"\"Test get_source_files function.\"\"\"", "path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self): \"\"\"Test handle_duplicates function.\"\"\" testperlfile = 'test/lib/perl/test.pm' testpodfile =", "a pod file when a pm file is in the list, get a", "def suite(self): \"\"\"Return all the testcases in this module.\"\"\" return TestLoader().loadTestsFromTestCase(SourcehandlerTest) if __name__", "sourcehandler.\"\"\" def setUp(self): \"\"\"Set up temp dir for tests.\"\"\" self.tmpdir = mkdtemp() def", "in ['pod', 'pm', 'pl', 'pan']: testfile = \"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) #", "file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test invalid shebang file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n')", "a correct dir testfile = 'test.pod' fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file =", "[testperlfile]), [testpodfile]) # Add a pm file when a pod file is in", "'target/doc/pod') os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def", "\"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\" # Test we", 
"\"\"\"Test handle_duplicates function.\"\"\" testperlfile = 'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod' # Add a correct", "['tpl', 'txt', 'xml']: testfile = \"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test valid", "import sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test class for sourcehandler.\"\"\" def setUp(self): \"\"\"Set up temp", "self.tmpdir = mkdtemp() def tearDown(self): \"\"\"Remove temp dir.\"\"\" shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\"", "get a list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile])", "extension in ['pod', 'pm', 'pl', 'pan']: testfile = \"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('', testfile))", "setUp(self): \"\"\"Set up temp dir for tests.\"\"\" self.tmpdir = mkdtemp() def tearDown(self): \"\"\"Remove", "Test invalid extensions for extension in ['tpl', 'txt', 'xml']: testfile = \"test.%s\" %", "on empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test for wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) #", "file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\" # Test valid extensions", "pm file is in the list, get a list with only the pod", "sys import shutil from tempfile import mkdtemp from unittest import TestCase, main, TestLoader", "test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\" # Test we get False if target is not", "with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile]) def test_list_source_files(self): \"\"\"Test", "% extension 
self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test valid shebang testfilename = \"test\" file =", "dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test if it can run a basic pom.xml and", "os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def test_get_source_files(self):", "file = open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\") file.close() self.assertEquals(sourcehandler.list_source_files(fulltestdir), [os.path.join(fulltestdir, testfile)]) def test_get_source_files(self): \"\"\"Test", "self.assertEquals(sourcehandler.get_source_files(self.tmpdir, False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def suite(self): \"\"\"Return all the testcases in this", "\"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test valid shebang testfilename = \"test\" file", "is_wanted_dir function.\"\"\" # Test we get False if target is not in the", "test if it fails on a empty dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test if", "\"\"\"Test maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) # test if it fails on", "can run a basic pom.xml and return 0 file = open(os.path.join(repoloc, \"pom.xml\"), \"w\")", "subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test for a correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self):", "self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def suite(self): \"\"\"Return all the testcases in this module.\"\"\" return TestLoader().loadTestsFromTestCase(SourcehandlerTest)", "file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') 
file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\" # Test valid", "'test.pod' fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file = open(os.path.join(fulltestdir, testfile), 'w') file.write(\"test\\n\") file.close()", "testfilename = \"test\" file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) #", "function.\"\"\" testperlfile = 'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod' # Add a correct item to", "self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\" # Test we get False if", "pom.xml and return 0 file = open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc),", "# Test valid shebang testfilename = \"test\" file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n')", "function.\"\"\" # Test a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test a correct dir", "Add a pm file when a pod file is in the list, get", "testfile)) # Test invalid extensions for extension in ['tpl', 'txt', 'xml']: testfile =", "list_source_files function.\"\"\" # Test a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test a correct", "def test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\" # Test valid extensions for extension in ['pod',", "False), []) self.assertFalse(sourcehandler.get_source_files(self.tmpdir, True)) def 
suite(self): \"\"\"Return all the testcases in this module.\"\"\"", "testfile)) # Test valid shebang testfilename = \"test\" file = open(os.path.join(self.tmpdir, testfilename), \"w\")", "run a basic pom.xml and return 0 file = open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>')", "= os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) # test if it fails on a empty dir", "for extension in ['tpl', 'txt', 'xml']: testfile = \"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('', testfile))", "invalid shebang file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self):", "file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile]) # Add a pm file when a", "['schema.pan'])) def test_handle_duplicates(self): \"\"\"Test handle_duplicates function.\"\"\" testperlfile = 'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod' #", "with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile]) # Add a", "= mkdtemp() def tearDown(self): \"\"\"Remove temp dir.\"\"\" shutil.rmtree(self.tmpdir) def test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc", "self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test if it can run a basic pom.xml and return", "for sourcehandler.\"\"\" import os import sys import shutil from tempfile import mkdtemp from", "False on empty fileslist self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test for wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan']))", "[]) # Test a correct dir testfile = 'test.pod' fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod')", 
"file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self): \"\"\"Test is_wanted_dir function.\"\"\" # Test we get False", "test if it can run a basic pom.xml and return 0 file =", "self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', [])) # Test for wrong subdir self.assertFalse(sourcehandler.is_wanted_dir('/tmp/target/test/', ['schema.pan'])) # Test for a", "list, get a list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]),", "test_list_source_files(self): \"\"\"Test list_source_files function.\"\"\" # Test a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test", "if it fails on a empty dir self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test if it", "shebang file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/python\\n') file.close() self.assertFalse(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) def test_is_wanted_dir(self): \"\"\"Test", "= 'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod' # Add a correct item to an empty", "a correct item to an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm']) # Add", "0) # test if it can run a basic pom.xml and return 0", "os import sys import shutil from tempfile import mkdtemp from unittest import TestCase,", "in the list, get a list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm',", "file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def test_is_wanted_file(self): \"\"\"Test is_wanted_file function.\"\"\" # Test valid extensions for", "# test if it can run a basic pom.xml and return 0 file", "item to an empty list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm']) # Add a pod", "on a empty dir 
self.assertNotEqual(sourcehandler.maven_clean_compile(repoloc), 0) # test if it can run a", "return 0 file = open(os.path.join(repoloc, \"pom.xml\"), \"w\") file.write('<project><modelVersion>4.0.0</modelVersion><groupId>test</groupId>') file.write('<artifactId>test</artifactId><version>1</version></project>') file.close() self.assertEqual(sourcehandler.maven_clean_compile(repoloc), 0) def", "\"\"\"Test is_wanted_file function.\"\"\" # Test valid extensions for extension in ['pod', 'pm', 'pl',", "test_maven_clean_compile(self): \"\"\"Test maven_clean_compile.\"\"\" repoloc = os.path.join(self.tmpdir, \"test\") os.makedirs(repoloc) # test if it fails", "file = open(os.path.join(self.tmpdir, testfilename), \"w\") file.write('#!/usr/bin/perl\\n') file.close() self.assertTrue(sourcehandler.is_wanted_file(self.tmpdir, testfilename)) # Test invalid shebang", "'pan']: testfile = \"test.%s\" % extension self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test invalid extensions for", "self.assertTrue(sourcehandler.is_wanted_file('', testfile)) # Test invalid extensions for extension in ['tpl', 'txt', 'xml']: testfile", "dir for tests.\"\"\" self.tmpdir = mkdtemp() def tearDown(self): \"\"\"Remove temp dir.\"\"\" shutil.rmtree(self.tmpdir) def", "only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile, [testperlfile]), [testpodfile]) # Add a pm", "'xml']: testfile = \"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test valid shebang testfilename", "extensions for extension in ['pod', 'pm', 'pl', 'pan']: testfile = \"test.%s\" % extension", "# Test for a correct path self.assertTrue(sourcehandler.is_wanted_dir('/tmp/target/doc/pod', ['schema.pan'])) def test_handle_duplicates(self): \"\"\"Test handle_duplicates function.\"\"\"", "list self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, []), ['test/lib/perl/test.pm']) # Add a pod file when a pm", "a list with only 
the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pm', testperlfile, [testpodfile]), [testpodfile]) def", "a bogus dir self.assertEquals(sourcehandler.list_source_files(self.tmpdir), []) # Test a correct dir testfile = 'test.pod'", "from tempfile import mkdtemp from unittest import TestCase, main, TestLoader sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) #", "TestCase, main, TestLoader sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa from quattordocbuild import sourcehandler class SourcehandlerTest(TestCase):", "main, TestLoader sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa from quattordocbuild import sourcehandler class SourcehandlerTest(TestCase): \"\"\"Test", "# Test a correct dir testfile = 'test.pod' fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir)", "import shutil from tempfile import mkdtemp from unittest import TestCase, main, TestLoader sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),", "the list, get a list with only the pod file back self.assertEquals(sourcehandler.handle_duplicates('test.pod', testpodfile,", "testfile = \"test.%s\" % extension self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test valid shebang testfilename =", "extension self.assertFalse(sourcehandler.is_wanted_file('', testfile)) # Test valid shebang testfilename = \"test\" file = open(os.path.join(self.tmpdir,", "Test a correct dir testfile = 'test.pod' fulltestdir = os.path.join(self.tmpdir, 'target/doc/pod') os.makedirs(fulltestdir) file", "test_handle_duplicates(self): \"\"\"Test handle_duplicates function.\"\"\" testperlfile = 'test/lib/perl/test.pm' testpodfile = 'test/doc/pod/test.pod' # Add a" ]
[ "| PRODUCER | [int][] | *(==)* | Find releases linked to the given", "number. | | LANGUAGES | [str][] | *SOME* | Filter using the language,", "_ConditionProxy( \"date\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy]", "| *(==)* | Find characters linked to the given visual novel ID. |", "to any (not all) of the given traits, the `!=` filter will return", "class storing all the attributes `Staff` type supports as condition. Hint: Check the", "| [str][] | *SOME* | Filter using the original language of the VN.", "| | PLATFORMS | [str][] | *SOME* | Filter using an array of", "Filter using the array of languages, the release is available in. | |", "supports (`==`, `!=`, `>`, `<`, `>=`, `<=`) operators. `|BaseCondition.ID_ARRAY| SOME |` supports only", "linked to any of the given traits. | \"\"\" # noqa: E501 NAME:", "| Search for the VN using it's title and releases. | | TAGS", "\"\"\" # noqa: E501 AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] = AID", "t.Final[_ConditionProxy] = _ConditionProxy( \"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE SEARCH: t.Final[_ConditionProxy] =", "alias IDs. | | SEARCH | [str][] | *(%)* | Performs a search", ") PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_some() ) RELEASED_DATE:", "(`==`, `!=`) are supported. `SOME + X` means `SOME` and `X` operators are", "Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME | [str][] |", "@classmethod def fill_some(cls, *symbols: str) -> Operator: \"\"\" A factory method for creating", "release date of the VN. 
| | PATCH | [bool][] | *(==)* |", "t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS __slots__ = () class ReleaseCondition(BaseCondition):", "| Find characters linked to the given visual novel ID. | | VN_ARRAY", "| TRAITS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | The `=` filter", "if the release is a patch. | | FREEWARE | [bool][] | *(==)*", "Attribute | Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| |", "| CATALOG | [str][] | *SOME* | Filter using the Catalog number. |", "VN. | | LANGUAGES | [None][] or [str][] | *SOME* | Filter using", "Filter using the language, the release is available in. | | LANGUAGES_ARRAY |", "t.Final[_ConditionProxy] = LANGUAGES PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] =", "of the VN. | | ORIG_LANG_ARRAY | A [typing.Iterable][] of [str][]s | *SOME*", "Operator object with some symbols. Args: *symbols (str): The additional symbols of the", "of objects which match the values from the API. | Field | Field", "(str): The symbols of the operator. Attributes: symbols (t.Tuple[str]): The symbols of the", "language of producer. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME*", "[str][] | *SOME + (%)* | Find the release using the original/official title.", ") GTIN: t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\", operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\",", "hope you understand the above. 
:) Tip: `Field Value Type` means the type", "operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"id\", operator=Operator.fill_some() ) __slots__ = () class VNCondition(BaseCondition):", "of value against which the field should be conditioned. Tip: All `X_ARRAY` fields", "Args: *symbols (str): The additional symbols of the operator. Returns: Operator: The created", "linked to the given visual novel ID. | | VN_ARRAY | A [typing.Iterable][]", "| Filter using an `ID` | | ID_ARRAY | A [typing.Iterable][] of [int][]s", "str) -> Operator: \"\"\" A factory method for creating an Operator object with", "Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE |", "QuoteCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff` type supports as condition.", "condition. Info: This class doesn't inherit from `BaseCondition` and doesn't have `ID` and", "Performs a search on the name, original and aliases fields. | \"\"\" #", "not meant to be created by users. \"\"\" __slots__ = (\"symbols\",) def __init__(self,", "`|UserCondition.USERNAME| SOME + (%)|` supports (`==`, `!=`, `%`) operators. If there is neither", "| Check if the release is a doujin. | | TYPE | [str][]", "__slots__ = () class QuoteCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff`", "| *(==)* | Label assigned to the VN. | \"\"\" # noqa: E501", "producer. | | SEARCH | [str][] | *(%)* | Performs a search on", "A class storing all the attributes `User` type supports as condition. Hint: Check", "operators (`==`, `!=`, `>`, `<`, `>=`, `<=`) are supported. `SOME` means only operators", "of [str][]s | *SOME* | Filter using an array of PLATFORMS. | \"\"\"", "object. 
Info: This method fills the `=`, `!=`, `>`, `<`, `>=`, `<=` symbols.", "| Operations Supported | Description | |-----------|-----------------------------------|----------------------|------------------------------------------| | VN | [int][] | *ALL*", "is available in. | | PLATFORMS | [str][] | *SOME* | Filter using", "operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator(\"=\") ) __slots__ = () class", "t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_all() )", "RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some()", "LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class", "TRAITS: t.Final[_ConditionProxy] = _ConditionProxy( \"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS __slots__ =", "Supported | Description | |-----------|-----------------------------------|----------------------|------------------------------------------| | VN | [int][] | *ALL* | Find", "t.Final[_ConditionProxy] = ORIG_LANG SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some())", "neither `ALL` nor `SOME` in the condition but an operator is specified, then", "the release is a patch. | | FREEWARE | [bool][] | *(==)* |", "visual novel ID. | | VN_ARRAY | A [typing.Iterable][] of [int][]s | *(==)*", "array of languages, the VN is available in. 
| | FIRST_CHAR | [None][]", "| | ORIGINAL | [None][] or [str][] | *SOME + (%)* | Find", "RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE:", "(str): The additional symbols of the operator. Returns: Operator: The created Operator object.", "attributes `Ulist` type supports as condition. Hint: Check the `UlistLabelsCondition` class for more", "SOME | Filter using an array of `ID`s.| \"\"\" # noqa: E501 ID:", ") LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\", operator=Operator.fill_some() ) ORIG_LANG:", "is a freeware. | | DOUJIN | [bool][] | *(==)* | Check if", "Info: This method fills the `=` and `!=` symbols. \"\"\" return cls(\"=\", \"!=\",", "| *SOME + (%)* | Find using name of character. | | ORIGINAL", "| A [typing.Iterable][] of [int][]s | *SOME* | Find using an array of", "original languages of the VN. | | SEARCH | [str][] | *(%)* |", "special value '0' is recognized as the currently logged in user. | \"\"\"", "array of PLATFORMS. | | RELEASED | [None][] | *SOME* | Filter using", "values and these fields yield an iterable of objects which match the values", "Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME |", "(%)* | Filter using the TITLE Field. | | PLATFORMS | [None][] or", "| TAGS | [int][] | *SOME* | Find VNs by tag. | |", "by alias ID. 
| | AID_ARRAY | A [typing.Iterable][] of [int][]s | *(==)*", "operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES", "FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\", operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE:", "[int][]s | *(==)* | Find staff by an array of alias IDs. |", "VN_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* | Find characters linked to", "LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES PLATFORMS: t.Final[_ConditionProxy]", "class ReleaseCondition(BaseCondition): \"\"\" A class storing all the attributes `Release` type supports as", "[str][]s | *(==)* | Find user using an array of usernames. | \"\"\"", "of [int][]s | *SOME* | Find using an array of visual novel IDs.", "the attributes `Release` type supports as condition. Hint: Check the `BaseCondition` class for", "UserCondition(BaseCondition): \"\"\" A class storing all the attributes `User` type supports as condition.", "of producer. | | LANGUAGE | [str][] | *SOME* | Filter using language", "*SOME* | Filter using the language, the release is available in. | |", "\"\"\" A class storing all the attributes `User` type supports as condition. Hint:", "t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] = VN PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy( \"producer\",", "match all the vn not starting with an alphabet. | | ORIG_LANG |", "*SOME + (%)* | Find using original/official name of the character. Can't use", "as t from .proxy import _ConditionProxy from ..objects import UlistLabels if t.TYPE_CHECKING: from", "Find using an array of visual novel IDs. 
| | LABEL | [int][]", "is recognized as the currently logged in user. | \"\"\" # noqa: E501", "original language of the VN. | | ORIG_LANG_ARRAY | A [typing.Iterable][] of [str][]s", "attributes `VN` type supports as condition. Hint: Check the `BaseCondition` class for more", "*SOME* | Find all the releases linked to the given visual novel IDs", "`<=`) are supported. `SOME` means only operators (`==`, `!=`) are supported. `SOME +", "of the producer. Can't use `%` with `None`. | | TYPE | [str][]", "class storing all the attributes `Release` type supports as condition. Hint: Check the", "linked to the given visual novel IDs in the array. | | PRODUCER", "[str][]s | *SOME* | Filter using the array of languages, the VN is", "\"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy] =", "| Find using name of character. | | ORIGINAL | [None][] or [str][]", "an `ID` | | ID_ARRAY | A [typing.Iterable][] of [int][]s | SOME |", "of the VN or None to match all the vn not starting with", "TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\", operator=Operator.fill_some()", "| VN | [int][] | *ALL* | Find releases linked to the given", "| Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE | [str][] | *SOME", "_ConditionProxy( \"released\", operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy]", "operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", 
operator=Operator.fill_some() )", "by visual novel ID. | | VN_ARRAY | A [typing.Iterable][] of [int][]s |", "Field Value Type | Operations Supported | Description | |----------|-----------------------------------|----------------------|--------------------------------| | ID |", "\"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__", "constructor. Args: *symbols (str): The symbols of the operator. Attributes: symbols (t.Tuple[str]): The", "= _ConditionProxy( \"username\", operator=Operator(\"=\") ) __slots__ = () class UlistLabelsCondition: \"\"\" A class", "field. | | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter", ") TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy( \"language\",", "| A [typing.Iterable][] of [int][]s | *SOME* | The `=` filter will return", "\"\"\" A class storing all the attributes `Ulist` type supports as condition. Hint:", "VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] = VN PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy(", "QuoteCondition, \"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition, } cls = condition_map[ type.__name__.lower() if", "| | VN_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find all", "TYPE | [str][] | *SOME* | Filter using type of producer. 
| |", "| Field Value Type | Operations Supported | Description | |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME", "t.Final[_ConditionProxy] = PLATFORMS RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] =", "A [typing.Iterable][] of [int][]s | *(==)* | Find staff by an array of", "using an array of PLATFORMS. | | RELEASED | [None][] | *SOME* |", "| [int][] | *ALL* | Find releases linked to the given visual novel", "from ..objects import UlistLabels if t.TYPE_CHECKING: from ..interface import T __all__ = (", "USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator(\"=\") ) __slots__ = () class UlistLabelsCondition: \"\"\"", "PLATFORMS | [str][] | *SOME* | Filter using an array of PLATFORMS. |", "Info: This one only supports `ID` and `ID_ARRAY` filters of `BaseCondition`. \"\"\" #", "Info: This class doesn't inherit from `BaseCondition` and doesn't have `ID` and `ID_ARRAY`", "| ID_ARRAY | A [typing.Iterable][] of [int][]s | SOME | Filter using an", "= _ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] = VN TRAITS:", "[bool][] | *(==)* | Check if the release is a freeware. | |", "*symbols) class BaseCondition: \"\"\" A base class storing the comman condition attributes. Tip:", "Find all the releases linked to the given visual novel IDs in the", "Supported | Description | |----------|-----------------------------------|----------------------|--------------------------------| | ID | [int][] | ALL | Filter", "\"\"\" A class storing all the attributes `Staff` type supports as condition. Hint:", "Operator: \"\"\" An object for storing operators for XCondition attributes to check condition", "the name, original and aliases fields. 
| \"\"\" # noqa: E501 AID: t.Final[_ConditionProxy]", "A [typing.Iterable][] of [int][]s | *SOME* | The `=` filter will return chars", "if the release is a freeware. | | DOUJIN | [bool][] | *(==)*", "[str][] | *SOME + (%)* | Filter using the TITLE Field. | |", "the attributes `Staff` type supports as condition. Hint: Check the `BaseCondition` class for", "the comman condition attributes. Tip: `ALL` below means all operators (`==`, `!=`, `>`,", "operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy(", "SOME + (%)|` supports (`==`, `!=`, `%`) operators. If there is neither `ALL`", "() class UserCondition(BaseCondition): \"\"\" A class storing all the attributes `User` type supports", "operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\", operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy] = _ConditionProxy(", "search on the name, original and aliases fields. | \"\"\" # noqa: E501", "the given visual novel ID. | | VN_ARRAY | A [typing.Iterable][] of [int][]s", "the TITLE Field. | | PLATFORMS | [None][] or [str][] | *SOME* |", "| Find using name of producer. | | ORIGINAL | [None][] or [str][]", "the attributes `VN` type supports as condition. Hint: Check the `BaseCondition` class for", "noqa: E501 UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\" A class storing", "not linked to any of the given traits. | \"\"\" # noqa: E501", "in. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter", "*SOME* | Filter using language of producer. | | LANGUAGES_ARRAY | A [typing.Iterable][]", "Find user by their username. 
| | USERNAME_ARRAY | A [typing.Iterable][] of [str][]s", "t.Final[_ConditionProxy] = _ConditionProxy( \"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS __slots__ = ()", "| | ORIG_LANG | [str][] | *SOME* | Filter using the original language", "| Attribute | Field Value Type | Operations Supported | Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------|", "for creating an Operator object with all symbols. Args: *symbols (str): The additional", "`!=`, `>`, `<`, `>=`, `<=`) operators. `|BaseCondition.ID_ARRAY| SOME |` supports only (`==`, `!=`)", "is a doujin. | | TYPE | [str][] | *SOME* | Filter using", "= VN TRAITS: t.Final[_ConditionProxy] = _ConditionProxy( \"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS", "with an alphabet. | | ORIG_LANG | [str][] | *SOME* | Filter using", "operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\", operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\"))", "linked to the given producer ID. | | TITLE | [str][] | *SOME", "*SOME + (%)* | Find the release using the title. | | ORIGINAL", "() class CharacterCondition(BaseCondition): \"\"\" A class storing all the attributes `Character` type supports", "# noqa: E501 TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy] =", "*SOME* | Find VNs using an array of tags. \"\"\" # noqa: E501", "| RELEASED_DATE | date | *ALL* | Filter using the release date of", "the field should be conditioned. Tip: All `X_ARRAY` fields must be conditioned against", "| *SOME + (%)* | Find the release using the title. | |", "`Ulist` type supports as condition. 
Hint: Check the `UlistLabelsCondition` class for more information.", "Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID | [int][] | *(==)* | Find using user", "+ (%)* | Filter using the TITLE Field. | | PLATFORMS | [None][]", "| [int][] | *(==)* | Find staff by alias ID. | | AID_ARRAY", "an operator is specified, then that means only that operator is supported. I", "operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy(", "\"type\", operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\", operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy] =", "E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\",", "fields. | \"\"\" # noqa: E501 AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy]", "operator=Operator(\"=\") ) __slots__ = () class UlistLabelsCondition: \"\"\" A class storing all the", "attributes to check condition support. Warning: This object is not meant to be", "`|BaseCondition.ID| ALL |` supports (`==`, `!=`, `>`, `<`, `>=`, `<=`) operators. `|BaseCondition.ID_ARRAY| SOME", "UlistLabelsCondition, \"ulist\": UlistCondition, } cls = condition_map[ type.__name__.lower() if type != UlistLabels else", "supported. I hope you understand the above. 
:) Tip: `Field Value Type` means", "\"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", ) class Operator: \"\"\"", "= _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] = AID SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__", "A [typing.Iterable][] of [int][]s | *SOME* | Find all the releases linked to", "() class UlistLabelsCondition: \"\"\" A class storing all the attributes `UlistLabels` type supports", "PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS __slots__ =", "using the JAN/UPC/EAN code. | | CATALOG | [str][] | *SOME* | Filter", "of [str][]s | *SOME* | Filter using an array of languages of producer.", "[int][]s | *SOME* | The `=` filter will return chars that are linked", "`=` filter will return chars that are linked to any (not all) of", "*symbols (str): The symbols of the operator. Attributes: symbols (t.Tuple[str]): The symbols of", "to check condition support. Warning: This object is not meant to be created", "| A [typing.Iterable][] of [int][]s | *(==)* | Find staff by an array", "# noqa: E501 __slots__ = () class UserCondition(BaseCondition): \"\"\" A class storing all", "alias ID. | | AID_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* |", "[None][] or [str][] | *SOME + (%)* | Find using original/official name of", "symbols. \"\"\" return cls(\"=\", \"!=\", *symbols) @classmethod def fill_all(cls, *symbols: str) -> Operator:", "*ALL* | Find by visual novel ID. 
| | VN_ARRAY | A [typing.Iterable][]", "will return chars that are not linked to any of the given traits.", "filter will return chars that are linked to any (not all) of the", "\"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", ) class Operator:", "def fill_some(cls, *symbols: str) -> Operator: \"\"\" A factory method for creating an", "`>`, `<`, `>=`, `<=`) operators. `|BaseCondition.ID_ARRAY| SOME |` supports only (`==`, `!=`) operators.", "operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy(", ") SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy]", "the release using the original/official title. (`%` operation not supported for `None`) |", "\"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", ) class Operator: \"\"\" An", "(%)* | Find the release using the title. | | ORIGINAL | [None][]", "use `%` with `None`. 
| | TYPE | [str][] | *SOME* | Filter", "| A [typing.Iterable][] of [str][]s | *SOME* | Filter using an array of", "class QuoteCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff` type supports as", "\"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", ) class Operator: \"\"\" An object for storing operators for", "| Operations Supported | Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID | [int][] | *(==)*", "from .proxy import _ConditionProxy from ..objects import UlistLabels if t.TYPE_CHECKING: from ..interface import", "of the VN. | | SEARCH | [str][] | *(%)* | Search for", "Operator constructor. Args: *symbols (str): The symbols of the operator. Attributes: symbols (t.Tuple[str]):", "name, original and aliases fields. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] =", "| Filter using an array of languages of producer. | | SEARCH |", "creating an Operator object with some symbols. Args: *symbols (str): The additional symbols", "UlistLabels if t.TYPE_CHECKING: from ..interface import T __all__ = ( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\",", "doesn't have `ID` and `ID_ARRAY` filters. | Attribute | Field Value Type |", "release is a freeware. | | DOUJIN | [bool][] | *(==)* | Check", "\"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_some()", "| | SEARCH | [str][] | *(%)* | Performs a search on the", "| Description | |-----------|-----------------------------------|----------------------|------------------------------------------| | VN | [int][] | *ALL* | Find by", "of the original languages of the VN. | | SEARCH | [str][] |", "`>=`, `<=`) operators. `|BaseCondition.ID_ARRAY| SOME |` supports only (`==`, `!=`) operators. 
`|UserCondition.USERNAME| SOME", "SEARCH | [str][] | *(%)* | Performs a search on the name, original", "freeware. | | DOUJIN | [bool][] | *(==)* | Check if the release", "a `None` value for `RELEASED`. | | RELEASED_DATE | [datetime.date][] | *ALL* |", "| TITLE | [str][] | *SOME + (%)* | Find the release using", "in. | | FIRST_CHAR | [None][] or [str][] | *SOME* | Filter using", "operator. Returns: Operator: The created Operator object. Info: This method fills the `=`,", "| |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE | [str][] | *SOME + (%)* | Filter using", "tags. \"\"\" # noqa: E501 TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS:", "_ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy]", "Filter using the PLATFORMS field. | | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s", "must be conditioned against an Iterable of values and these fields yield an", "of `ID`s.| \"\"\" # noqa: E501 ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy]", "are supported. For example: `|BaseCondition.ID| ALL |` supports (`==`, `!=`, `>`, `<`, `>=`,", "| PATCH | [bool][] | *(==)* | Check if the release is a", "traits, the `!=` filter will return chars that are not linked to any", "A [typing.Iterable][] of [int][]s | SOME | Filter using an array of `ID`s.|", "[str][] | *SOME* | Filter using the language, the release is available in.", "*SOME* | Filter using type of producer. | | LANGUAGE | [str][] |", "field should be conditioned. Tip: All `X_ARRAY` fields must be conditioned against an", "| [str][] | *SOME* | Filter using an array of PLATFORMS. 
| |", "Filter using a `None` value for `RELEASED`. | | RELEASED_DATE | date |", "| Find using original/official name of the producer. Can't use `%` with `None`.", "@classmethod def fill_all(cls, *symbols: str) -> Operator: \"\"\" A factory method for creating", "Find VNs by tag. | | TAGS_ARRAY | A [typing.Iterable][] of [int][]s |", "type: t.Type[T], ): condition_map = { \"vn\": VNCondition, \"release\": ReleaseCondition, \"producer\": ProducerCondition, \"character\":", "an Operator object with all symbols. Args: *symbols (str): The additional symbols of", "A factory method for creating an Operator object with some symbols. Args: *symbols", "release is a patch. | | FREEWARE | [bool][] | *(==)* | Check", "| Attribute | Field Value Type | Operations Supported | Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------|", "Filter using an array of PLATFORMS. | | RELEASED | [None][] | *SOME*", "ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"id\", operator=Operator.fill_some() ) __slots__", "TRAITS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | The `=` filter will", "| Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME | [str][] | *SOME + (%)* |", "symbols of the operator. Returns: Operator: The created Operator object. 
Info: This method", "t.Final[_ConditionProxy] = AID SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class QuoteCondition(BaseCondition):", "LANGUAGES PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS __slots__", "| | TAGS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find VNs", "noqa: E501 AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] = AID SEARCH: t.Final[_ConditionProxy]", "*SOME* | Find characters by trait. | | TRAITS_ARRAY | A [typing.Iterable][] of", "[int][] | *ALL* | Find releases linked to the given visual novel ID.", "[str][] | *(%)* | Performs a search on the name, original and aliases", "is not meant to be created by users. \"\"\" __slots__ = (\"symbols\",) def", "condition. Hint: Check the `UlistLabelsCondition` class for more information. | Attribute | Field", "\"id\", operator=Operator.fill_some() ) __slots__ = () class VNCondition(BaseCondition): \"\"\" A class storing all", "that means only that operator is supported. I hope you understand the above.", "using an array of usernames. | \"\"\" # noqa: E501 USERNAME: t.Final[_ConditionProxy] =", "\"original\", operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] =", "\"freeware\", operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\",", "| AID | [int][] | *(==)* | Find staff by alias ID. |", "[bool][] | *(==)* | Check if the release is a patch. 
| |", "Type | Operations Supported | Description | |----------|-----------------------------------|----------------------|--------------------------------| | ID | [int][] |", "operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] = VN TRAITS: t.Final[_ConditionProxy] = _ConditionProxy( \"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY:", "| Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN", "usernames. | \"\"\" # noqa: E501 USERNAME: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") )", "_ConditionProxy( \"released\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy]", "operators. `|BaseCondition.ID_ARRAY| SOME |` supports only (`==`, `!=`) operators. `|UserCondition.USERNAME| SOME + (%)|`", "FREEWARE | [bool][] | *(==)* | Check if the release is a freeware.", "| LANGUAGE | [str][] | *SOME* | Filter using language of producer. |", "Filter using an array of the original languages of the VN. | |", "| LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using the", "| [None][] or [str][] | *SOME* | Filter using the first character of", "`None`. | | SEARCH | [str][] | *(%)* | Performs a search on", "Check the `BaseCondition` class for more information. Info: This one only supports `ID`", "only supports `ID` and `ID_ARRAY` filters of `BaseCondition`. \"\"\" # noqa: E501 __slots__", "all the attributes `Producer` type supports as condition. Hint: Check the `BaseCondition` class", "The special value '0' is recognized as the currently logged in user. 
|", "..objects import UlistLabels if t.TYPE_CHECKING: from ..interface import T __all__ = ( \"VNCondition\",", "E501 USERNAME: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"username\",", "as condition. Hint: Check the `BaseCondition` class for more information. | Attribute |", "Find the release using the original/official title. (`%` operation not supported for `None`)", "ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) VN:", "name, original and aliases fields. | \"\"\" # noqa: E501 AID: t.Final[_ConditionProxy] =", "[int][] | *ALL* | Find by visual novel ID. | | VN_ARRAY |", "more information. Info: This one only supports `ID` and `ID_ARRAY` filters of `BaseCondition`.", "Attribute | Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| |", "| \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] =", "TITLE Field. | | PLATFORMS | [None][] or [str][] | *SOME* | Filter", "(`==`, `!=`, `>`, `<`, `>=`, `<=`) are supported. `SOME` means only operators (`==`,", "of [str][]s | *SOME* | Filter using the array of languages, the VN", "of [int][]s | *(==)* | Find characters linked to the given visual novel", "operators. 
If there is neither `ALL` nor `SOME` in the condition but an", "| Description | |----------|-----------------------------------|----------------------|--------------------------------| | ID | [int][] | ALL | Filter using", "= LANGUAGE SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class CharacterCondition(BaseCondition): \"\"\"", "fills the `=`, `!=`, `>`, `<`, `>=`, `<=` symbols. \"\"\" return cls(\"=\", \"!=\",", "AID | [int][] | *(==)* | Find staff by alias ID. | |", "\"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy] =", "*(==)* | Label assigned to the VN. | \"\"\" # noqa: E501 VN:", "SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class CharacterCondition(BaseCondition): \"\"\" A class", "| Find all the releases linked to the given visual novel IDs in", "*(%)* | Performs a search on the name, original and aliases fields. |", "release using the title. | | ORIGINAL | [None][] or [str][] | *SOME", "= _ConditionProxy( \"released\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_all() ) LANGUAGES:", "| [None][] or [str][] | *SOME + (%)* | Find using original/official name", "_ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class QuoteCondition(BaseCondition): \"\"\" A class storing all the", "to the VN. | \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all())", "[typing.Iterable][] of [int][]s | *SOME* | Find using an array of visual novel", "\"\"\" return cls(\"=\", \"!=\", *symbols) @classmethod def fill_all(cls, *symbols: str) -> Operator: \"\"\"", "| Find using an array of visual novel IDs. 
| | LABEL |", "or [str][] | *SOME* | Filter using the language, the VN is available", "the VN. | | ORIG_LANG_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* |", "class ProducerCondition(BaseCondition): \"\"\" A class storing all the attributes `Producer` type supports as", "__slots__ = () class UserCondition(BaseCondition): \"\"\" A class storing all the attributes `User`", "`SOME` means only operators (`==`, `!=`) are supported. `SOME + X` means `SOME`", "[int][] | *(==)* | Find using user ID. The special value '0' is", "Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID | [int][] | *(==)* | Find staff by", "t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\", operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy]", "| FIRST_CHAR | [None][] or [str][] | *SOME* | Filter using the first", "`Field Value Type` means the type of value against which the field should", "t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\", operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\", operator=Operator.fill_some() )", "ORIG_LANG | [str][] | *SOME* | Filter using the original language of the", "| | VN | [int][] | *(==)* | Find characters linked to the", "aliases fields. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\")", "*(==)* | Find releases linked to the given producer ID. | | TITLE", "A [typing.Iterable][] of [int][]s | *(==)* | Find characters linked to the given", "the given visual novel IDs in the array. | | PRODUCER | [int][]", "| | TITLE | [str][] | *SOME + (%)* | Find the release", "the `UlistLabelsCondition` class for more information. 
| Attribute | Field Value Type |", "T __all__ = ( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\",", "StaffCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff` type supports as condition.", "fills the `=` and `!=` symbols. \"\"\" return cls(\"=\", \"!=\", *symbols) @classmethod def", "Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN |", "type supports as condition. Hint: Check the `BaseCondition` class for more information. Info:", "VN. | | SEARCH | [str][] | *(%)* | Search for the VN", "Attribute | Field Value Type | Operations Supported | Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| |", "storing all the attributes `Staff` type supports as condition. Hint: Check the `BaseCondition`", "more information. | Attribute | Field Value Type | Operations Supported | Description", "character. | | ORIGINAL | [None][] or [str][] | *SOME + (%)* |", "t.Final[_ConditionProxy] = _ConditionProxy( \"vn\", operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ =", "Value Type | Operations Supported | Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID | [int][]", "| [bool][] | *(==)* | Check if the release is a freeware. 
|", "= _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] =", "Filter using an `ID` | | ID_ARRAY | A [typing.Iterable][] of [int][]s |", "| Filter using the release date of the VN. | | LANGUAGES |", "| Find using original/official name of the character. Can't use `%` with `None`.", "*symbols (str): The additional symbols of the operator. Returns: Operator: The created Operator", "from ..interface import T __all__ = ( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\",", "SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class QuoteCondition(BaseCondition): \"\"\" A class", "`%` with `None`. | | SEARCH | [str][] | *(%)* | Performs a", "`RELEASED`. | | RELEASED_DATE | [datetime.date][] | *ALL* | Filter using the release", "A class storing all the attributes `Staff` type supports as condition. Hint: Check", "the `=`, `!=`, `>`, `<`, `>=`, `<=` symbols. \"\"\" return cls(\"=\", \"!=\", \">\",", "[str][] | *SOME + (%)* | Find the release using the title. |", "t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy]", "\"\"\" self.symbols = symbols @classmethod def fill_some(cls, *symbols: str) -> Operator: \"\"\" A", "| Filter using an array of PLATFORMS. | \"\"\" # noqa: E501 VN:", "Catalog number. 
| | LANGUAGES | [str][] | *SOME* | Filter using the", "| ID | [int][] | ALL | Filter using an `ID` | |", "the given traits, the `!=` filter will return chars that are not linked", "LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using an array", "TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some()", "release using the original/official title. (`%` operation not supported for `None`) | |", "an Operator object with some symbols. Args: *symbols (str): The additional symbols of", "= _ConditionProxy( \"id\", operator=Operator.fill_some() ) __slots__ = () class VNCondition(BaseCondition): \"\"\" A class", "value '0' is recognized as the currently logged in user. | \"\"\" #", "_ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy]", "of the operator. \"\"\" self.symbols = symbols @classmethod def fill_some(cls, *symbols: str) ->", "| |-----------|-----------------------------------|----------------------|------------------------------------------| | VN | [int][] | *ALL* | Find by visual novel", "[int][]s | *SOME* | Find using an array of visual novel IDs. |", "Warning: This object is not meant to be created by users. \"\"\" __slots__", "__init__(self, *symbols: str) -> None: \"\"\" Operator constructor. Args: *symbols (str): The symbols", "an array of the original languages of the VN. | | SEARCH |", "| [int][] | ALL | Filter using an `ID` | | ID_ARRAY |", "| Filter using an array of PLATFORMS. 
| | RELEASED | [None][] |", "\"producer\": ProducerCondition, \"character\": CharacterCondition, \"staff\": StaffCondition, \"quote\": QuoteCondition, \"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\":", "Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME | [str][] | *SOME +", "| VN_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* | Find characters linked", "| | CATALOG | [str][] | *SOME* | Filter using the Catalog number.", "\"vn\": VNCondition, \"release\": ReleaseCondition, \"producer\": ProducerCondition, \"character\": CharacterCondition, \"staff\": StaffCondition, \"quote\": QuoteCondition, \"user\":", "Operator object. Info: This method fills the `=`, `!=`, `>`, `<`, `>=`, `<=`", "Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME | [str][]", "Supported | Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID | [int][] | *(==)* | Find", "using the release date of the VN. | | LANGUAGES | [None][] or", "__slots__ = () class ReleaseCondition(BaseCondition): \"\"\" A class storing all the attributes `Release`", "`!=`) operators. `|UserCondition.USERNAME| SOME + (%)|` supports (`==`, `!=`, `%`) operators. If there", "| \"\"\" # noqa: E501 AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] =", "title. 
| | ORIGINAL | [None][] or [str][] | *SOME + (%)* |", ") ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"date\",", "*SOME + (%)* | Find using name of producer. | | ORIGINAL |", "[None][] or [str][] | *SOME* | Filter using the first character of the", "[int][]s | SOME | Filter using an array of `ID`s.| \"\"\" # noqa:", "\"\"\" # noqa: E501 ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy(", "Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME | [str][] | *SOME + (%)*", "using the PLATFORMS field. | | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s |", "cls(\"=\", \"!=\", *symbols) @classmethod def fill_all(cls, *symbols: str) -> Operator: \"\"\" A factory", "TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS __slots__ = () class StaffCondition(BaseCondition): \"\"\" A class storing", "LABEL | [int][] | *(==)* | Label assigned to the VN. | \"\"\"", "= _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS __slots__ = () class ReleaseCondition(BaseCondition): \"\"\"", "= () def _condition_selector( type: t.Type[T], ): condition_map = { \"vn\": VNCondition, \"release\":", "\"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition, } cls = condition_map[ type.__name__.lower() if type != UlistLabels", "object for storing operators for XCondition attributes to check condition support. Warning: This", "| *SOME* | Filter using the type of release. | | GTIN |", "of values and these fields yield an iterable of objects which match the", "| *SOME* | Filter using the original language of the VN. | |", "of character. 
| | ORIGINAL | [None][] or [str][] | *SOME + (%)*", "`ID_ARRAY` filters of `BaseCondition`. \"\"\" # noqa: E501 __slots__ = () class UserCondition(BaseCondition):", "*SOME* | Filter using the PLATFORMS field. | | PLATFORMS_ARRAY | A [typing.Iterable][]", "[int][]s | *(==)* | Find characters linked to the given visual novel ID", "be created by users. \"\"\" __slots__ = (\"symbols\",) def __init__(self, *symbols: str) ->", "| Find by visual novel ID. | | VN_ARRAY | A [typing.Iterable][] of", "= _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"vn\", operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy] =", "of languages, the release is available in. | | PLATFORMS | [str][] |", "of [str][]s | *(==)* | Find user using an array of usernames. |", "using the first character of the VN or None to match all the", "operation not supported for `None`) | | RELEASED | [None][] | *SOME* |", "= _ConditionProxy( \"firstchar\", operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY:", "= { \"vn\": VNCondition, \"release\": ReleaseCondition, \"producer\": ProducerCondition, \"character\": CharacterCondition, \"staff\": StaffCondition, \"quote\":", "| *SOME* | Find VNs by tag. | | TAGS_ARRAY | A [typing.Iterable][]", "for storing operators for XCondition attributes to check condition support. 
Warning: This object", "t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator(\"=\") )", "_ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] = VN TRAITS: t.Final[_ConditionProxy]", "of [int][]s | *SOME* | The `=` filter will return chars that are", "Description | |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME | [str][] | *SOME + (%)* | Find", "a search on the name, original and aliases fields. | \"\"\" # noqa:", "| *SOME* | Filter using a `None` value for `RELEASED`. | | RELEASED_DATE", "_ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\",", "*SOME* | Filter using the array of languages, the VN is available in.", "[str][]s | *SOME* | Filter using an array of PLATFORMS. | \"\"\" #", "(\"symbols\",) def __init__(self, *symbols: str) -> None: \"\"\" Operator constructor. Args: *symbols (str):", "to the given visual novel ID. | | VN_ARRAY | A [typing.Iterable][] of", "t.Final[_ConditionProxy] = TRAITS __slots__ = () class StaffCondition(BaseCondition): \"\"\" A class storing all", "`ID` and `ID_ARRAY` filters of `BaseCondition`. \"\"\" # noqa: E501 __slots__ = ()", "t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") )", "producer. Can't use `%` with `None`. 
| | TYPE | [str][] | *SOME*", "operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy( \"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE", "DOUJIN | [bool][] | *(==)* | Check if the release is a doujin.", "Value Type` means the type of value against which the field should be", "`<`, `>=`, `<=`) operators. `|BaseCondition.ID_ARRAY| SOME |` supports only (`==`, `!=`) operators. `|UserCondition.USERNAME|", "ReleaseCondition(BaseCondition): \"\"\" A class storing all the attributes `Release` type supports as condition.", "|----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME | [str][] | *SOME + (%)* | Find user by", "| RELEASED | [None][] | *SOME* | Filter using a `None` value for", "| [str][] | *(%)* | Search for the VN using it's title and", "ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_some()", "| Check if the release is a patch. | | FREEWARE | [bool][]", "operators (`==`, `!=`) are supported. `SOME + X` means `SOME` and `X` operators", "LANGUAGES | [None][] or [str][] | *SOME* | Filter using the language, the", "operators for XCondition attributes to check condition support. Warning: This object is not", "by an array of alias IDs. | | SEARCH | [str][] | *(%)*", "t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"id\", operator=Operator.fill_some() ) __slots__ =", "from `BaseCondition` and doesn't have `ID` and `ID_ARRAY` filters. | Attribute | Field", "`ID` and `ID_ARRAY` filters. | Attribute | Field Value Type | Operations Supported", "PLATFORMS __slots__ = () class ProducerCondition(BaseCondition): \"\"\" A class storing all the attributes", "\"\"\" Operator constructor. 
Args: *symbols (str): The symbols of the operator. Attributes: symbols", "\">=\", \"<\", \"<=\", *symbols) class BaseCondition: \"\"\" A base class storing the comman", "*SOME* | Filter using the JAN/UPC/EAN code. | | CATALOG | [str][] |", "the VN. | | PATCH | [bool][] | *(==)* | Check if the", "| [int][] | *(==)* | Label assigned to the VN. | \"\"\" #", "| | FREEWARE | [bool][] | *(==)* | Check if the release is", "staff by alias ID. | | AID_ARRAY | A [typing.Iterable][] of [int][]s |", "the `=` and `!=` symbols. \"\"\" return cls(\"=\", \"!=\", *symbols) @classmethod def fill_all(cls,", "= _ConditionProxy( \"catalog\", operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY:", "class storing all the attributes `UlistLabels` type supports as condition. Info: This class", "operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some()", "the API. | Field | Field Value Type | Operations Supported | Description", "| | LANGUAGES | [str][] | *SOME* | Filter using the language, the", "as condition. Hint: Check the `BaseCondition` class for more information. Info: This one", "noqa: E501 ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"id\", operator=Operator.fill_some()", "if the release is a doujin. | | TYPE | [str][] | *SOME*", "ORIG_LANG SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy]", "original/official title. 
(`%` operation not supported for `None`) | | RELEASED | [None][]", "PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy]", "| GTIN | [int][] | *SOME* | Filter using the JAN/UPC/EAN code. |", "= _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\", operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy] =", "fields. | | VN | [int][] | *(==)* | Find characters linked to", "| | SEARCH | [str][] | *(%)* | Search for the VN using", "( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", )", "using an array of PLATFORMS. | \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] =", "| VN | [int][] | *(==)* | Find characters linked to the given", "PATCH | [bool][] | *(==)* | Check if the release is a patch.", "+ (%)* | Find using original/official name of the producer. Can't use `%`", "All `X_ARRAY` fields must be conditioned against an Iterable of values and these", "languages of the VN. | | SEARCH | [str][] | *(%)* | Search", "characters linked to the given visual novel ID array. | | TRAITS |", "the VN or None to match all the vn not starting with an", "t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy( \"language\", operator=Operator.fill_some() )", "\"\"\" __slots__ = (\"symbols\",) def __init__(self, *symbols: str) -> None: \"\"\" Operator constructor.", "array of the original languages of the VN. 
| | SEARCH | [str][]", "_ConditionProxy( \"firstchar\", operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy]", "E501 ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"id\", operator=Operator.fill_some() )", ") DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() )", "supports only (`==`, `!=`) operators. `|UserCondition.USERNAME| SOME + (%)|` supports (`==`, `!=`, `%`)", "| | RELEASED_DATE | date | *ALL* | Filter using the release date", "UlistLabelsCondition: \"\"\" A class storing all the attributes `UlistLabels` type supports as condition.", "| [str][] | *(%)* | Performs a search on the name, original and", "[str][] | *SOME + (%)* | Find using original/official name of the character.", "the operator. Attributes: symbols (t.Tuple[str]): The symbols of the operator. \"\"\" self.symbols =", "t.Final[_ConditionProxy] = VN PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy( \"producer\", operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy] =", "VN_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find using an array", "*SOME* | Filter using an array of languages of producer. | | SEARCH", "| *SOME* | Filter using an array of PLATFORMS. | | PLATFORMS_ARRAY |", "an array of visual novel IDs. | | LABEL | [int][] | *(==)*", "JAN/UPC/EAN code. | | CATALOG | [str][] | *SOME* | Filter using the", "SOME |` supports only (`==`, `!=`) operators. 
`|UserCondition.USERNAME| SOME + (%)|` supports (`==`,", "= _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS RELEASED: t.Final[_ConditionProxy] = _ConditionProxy(", "| *SOME* | Filter using the language, the VN is available in. |", "A class storing all the attributes `UlistLabels` type supports as condition. Info: This", "= _ConditionProxy( \"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\",", "| [None][] or [str][] | *SOME* | Filter using the PLATFORMS field. |", "| *SOME* | Filter using language of producer. | | LANGUAGES_ARRAY | A", ") ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\",", "logged in user. | \"\"\" # noqa: E501 UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\"))", "class Operator: \"\"\" An object for storing operators for XCondition attributes to check", "*SOME* | The `=` filter will return chars that are linked to any", "\"catalog\", operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] =", "all the attributes `Staff` type supports as condition. Hint: Check the `BaseCondition` class", "Returns: Operator: The created Operator object. Info: This method fills the `=` and", ") ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] =", "doujin. 
| | TYPE | [str][] | *SOME* | Filter using the type", "| NAME | [str][] | *SOME + (%)* | Find using name of", "that are linked to any (not all) of the given traits, the `!=`", "t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() )", "\"staff\": StaffCondition, \"quote\": QuoteCondition, \"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition, } cls =", "the vn not starting with an alphabet. | | ORIG_LANG | [str][] |", "*(==)* | Find staff by an array of alias IDs. | | SEARCH", "language, the VN is available in. | | LANGUAGES_ARRAY | A [typing.Iterable][] of", "t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS", "= _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) TYPE:", "= _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] = VN TRAITS: t.Final[_ConditionProxy] = _ConditionProxy( \"traits\", operator=Operator.fill_some()", "_condition_selector( type: t.Type[T], ): condition_map = { \"vn\": VNCondition, \"release\": ReleaseCondition, \"producer\": ProducerCondition,", "| ORIGINAL | [None][] or [str][] | *SOME + (%)* | Find the", "|-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE | [str][] | *SOME + (%)* | Filter using the", "storing all the attributes `Producer` type supports as condition. Hint: Check the `BaseCondition`", "storing all the attributes `Character` type supports as condition. 
Hint: Check the `BaseCondition`", "t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() )", "_ConditionProxy( \"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\"))", "all the attributes `Character` type supports as condition. Hint: Check the `BaseCondition` class", "characters by trait. | | TRAITS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME*", "or None to match all the vn not starting with an alphabet. |", "| |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID | [int][] | *(==)* | Find staff by alias", "[typing.Iterable][] of [int][]s | *SOME* | Find VNs using an array of tags.", "class UserCondition(BaseCondition): \"\"\" A class storing all the attributes `User` type supports as", "| Filter using the array of languages, the VN is available in. |", "to the given producer ID. | | TITLE | [str][] | *SOME +", "| [str][] | *SOME* | Filter using language of producer. | | LANGUAGES_ARRAY", "recognized as the currently logged in user. | \"\"\" # noqa: E501 UID:", ") PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\", operator=Operator(\"=\") )", "condition. Hint: Check the `BaseCondition` class for more information. | Attribute | Field", "A class storing all the attributes `Ulist` type supports as condition. 
Hint: Check", "() class StaffCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff` type supports", "| Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME | [str][] | *SOME", "[int][] | *SOME* | Find characters by trait. | | TRAITS_ARRAY | A", "search on the name, original and aliases fields. | | VN | [int][]", "A [typing.Iterable][] of [str][]s | *SOME* | Filter using the array of languages,", "Check if the release is a patch. | | FREEWARE | [bool][] |", "| SEARCH | [str][] | *(%)* | Performs a search on the name,", "Field Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID |", "+ (%)* | Find the release using the title. | | ORIGINAL |", "| Find user using an array of usernames. | \"\"\" # noqa: E501", "meant to be created by users. \"\"\" __slots__ = (\"symbols\",) def __init__(self, *symbols:", "VN | [int][] | *ALL* | Find by visual novel ID. 
| |", "RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_all()", "() class QuoteCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff` type supports", "_ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy]", "operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\",", "= _ConditionProxy( \"released\", operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY:", "array of languages of producer. | | SEARCH | [str][] | *(%)* |", "given visual novel ID. | | VN_ARRAY | A [typing.Iterable][] of [int][]s |", "`ALL` nor `SOME` in the condition but an operator is specified, then that", "value against which the field should be conditioned. Tip: All `X_ARRAY` fields must", "TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS __slots__ = () class", "type of release. | | GTIN | [int][] | *SOME* | Filter using", "an array of languages of producer. | | SEARCH | [str][] | *(%)*", "return chars that are linked to any (not all) of the given traits,", "CharacterCondition, \"staff\": StaffCondition, \"quote\": QuoteCondition, \"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition, } cls", "Operations Supported | Description | |-----------|-----------------------------------|----------------------|------------------------------------------| | VN | [int][] | *ALL* |", "Operator: The created Operator object. 
Info: This method fills the `=` and `!=`", "TAGS | [int][] | *SOME* | Find VNs by tag. | | TAGS_ARRAY", "specified, then that means only that operator is supported. I hope you understand", "`%`) operators. If there is neither `ALL` nor `SOME` in the condition but", "producer. | | LANGUAGE | [str][] | *SOME* | Filter using language of", "type supports as condition. Hint: Check the `BaseCondition` class for more information. |", "VN | [int][] | *(==)* | Find characters linked to the given visual", "and `ID_ARRAY` filters of `BaseCondition`. \"\"\" # noqa: E501 __slots__ = () class", "_ConditionProxy from ..objects import UlistLabels if t.TYPE_CHECKING: from ..interface import T __all__ =", "= ( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\",", "fields yield an iterable of objects which match the values from the API.", "| A [typing.Iterable][] of [int][]s | *(==)* | Find characters linked to the", "Attribute | Field Value Type | Operations Supported | Description | |----------------|-----------------------------------|----------------------|----------------------------------------| |", "check condition support. Warning: This object is not meant to be created by", "original and aliases fields. | \"\"\" # noqa: E501 AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\",", "`UlistLabels` type supports as condition. Info: This class doesn't inherit from `BaseCondition` and", "Find releases linked to the given visual novel ID. | | VN_ARRAY |", "the attributes `User` type supports as condition. Hint: Check the `BaseCondition` class for", "operators. `|UserCondition.USERNAME| SOME + (%)|` supports (`==`, `!=`, `%`) operators. 
If there is", "Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN | [int][] | *ALL* | Find releases linked", "given traits. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\")", "The created Operator object. Info: This method fills the `=`, `!=`, `>`, `<`,", "|-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID | [int][] | *(==)* | Find using user ID. The", "[int][] | *(==)* | Find releases linked to the given producer ID. |", "class StaffCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff` type supports as", "UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\" A class storing all the", "return cls(\"=\", \"!=\", *symbols) @classmethod def fill_all(cls, *symbols: str) -> Operator: \"\"\" A", "*SOME* | Filter using the Catalog number. | | LANGUAGES | [str][] |", "\"type\", operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy( \"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] =", "| *(==)* | Find using user ID. The special value '0' is recognized", "`=` and `!=` symbols. \"\"\" return cls(\"=\", \"!=\", *symbols) @classmethod def fill_all(cls, *symbols:", "condition attributes. 
Tip: `ALL` below means all operators (`==`, `!=`, `>`, `<`, `>=`,", "AID SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class QuoteCondition(BaseCondition): \"\"\" A", "|--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME | [str] | *SOME + (%)* | Find using name", "Operator: The created Operator object. Info: This method fills the `=`, `!=`, `>`,", "supports as condition. Hint: Check the `UlistLabelsCondition` class for more information. | Attribute", "a doujin. | | TYPE | [str][] | *SOME* | Filter using the", "| [bool][] | *(==)* | Check if the release is a patch. |", "the PLATFORMS field. | | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s | *SOME*", "| | PATCH | [bool][] | *(==)* | Check if the release is", "Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE | [str][]", "release. | | GTIN | [int][] | *SOME* | Filter using the JAN/UPC/EAN", "is available in. | | FIRST_CHAR | [None][] or [str][] | *SOME* |", "*SOME* | Find using an array of visual novel IDs. 
| | LABEL", "= _ConditionProxy( \"date\", operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] =", "Operations Supported | Description | |----------|-----------------------------------|----------------------|--------------------------------| | ID | [int][] | ALL |", "| TITLE | [str][] | *SOME + (%)* | Filter using the TITLE", ") USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator(\"=\") ) __slots__ = () class UlistLabelsCondition:", "of [int][]s | *(==)* | Find staff by an array of alias IDs.", "VN_ARRAY: t.Final[_ConditionProxy] = VN PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy( \"producer\", operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy]", "the attributes `Ulist` type supports as condition. Hint: Check the `UlistLabelsCondition` class for", "| Filter using language of producer. | | LANGUAGES_ARRAY | A [typing.Iterable][] of", "_ConditionProxy( \"username\", operator=Operator(\"=\") ) __slots__ = () class UlistLabelsCondition: \"\"\" A class storing", "t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] = VN", "PLATFORMS field. | | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* |", "| *(%)* | Search for the VN using it's title and releases. |", "Find by visual novel ID. | | VN_ARRAY | A [typing.Iterable][] of [int][]s", "t.Type[T], ): condition_map = { \"vn\": VNCondition, \"release\": ReleaseCondition, \"producer\": ProducerCondition, \"character\": CharacterCondition,", "\"\"\" A class storing all the attributes `Character` type supports as condition. Hint:", "`None`. | | TYPE | [str][] | *SOME* | Filter using type of", "method fills the `=`, `!=`, `>`, `<`, `>=`, `<=` symbols. \"\"\" return cls(\"=\",", "and aliases fields. 
| | VN | [int][] | *(==)* | Find characters", "| [str][] | *SOME + (%)* | Find user by their username. |", "t from .proxy import _ConditionProxy from ..objects import UlistLabels if t.TYPE_CHECKING: from ..interface", "| LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using an", "the first character of the VN or None to match all the vn", "UID | [int][] | *(==)* | Find using user ID. The special value", "[str][] | *SOME* | Filter using the Catalog number. | | LANGUAGES |", "[str][] | *SOME* | Filter using type of producer. | | LANGUAGE |", "SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] =", "operator is supported. I hope you understand the above. :) Tip: `Field Value", "or [str][] | *SOME* | Filter using the PLATFORMS field. | | PLATFORMS_ARRAY", "t.Final[_ConditionProxy] = _ConditionProxy( \"producer\", operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") )", "# noqa: E501 UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\" A class", "in user. | \"\"\" # noqa: E501 UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\")) class", "chars that are not linked to any of the given traits. | \"\"\"", "the attributes `UlistLabels` type supports as condition. Info: This class doesn't inherit from", "| *(==)* | Check if the release is a freeware. 
| | DOUJIN", "_ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] = VN TRAITS: t.Final[_ConditionProxy] = _ConditionProxy( \"traits\", operator=Operator.fill_some() )", "A [typing.Iterable][] of [str][]s | *SOME* | Filter using an array of PLATFORMS.", "| NAME | [str] | *SOME + (%)* | Find using name of", "means all operators (`==`, `!=`, `>`, `<`, `>=`, `<=`) are supported. `SOME` means", "using an array of PLATFORMS. | | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s", "(%)* | Find user by their username. | | USERNAME_ARRAY | A [typing.Iterable][]", "= LANGUAGES PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS", "| | RELEASED | [None][] | *SOME* | Filter using a `None` value", "# noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] =", "the array. | | PRODUCER | [int][] | *(==)* | Find releases linked", "operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ =", "t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class QuoteCondition(BaseCondition): \"\"\" A class storing", "ID. | | TITLE | [str][] | *SOME + (%)* | Find the", "the name, original and aliases fields. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy]", "staff by an array of alias IDs. | | SEARCH | [str][] |", "TAGS __slots__ = () class ReleaseCondition(BaseCondition): \"\"\" A class storing all the attributes", "`X_ARRAY` fields must be conditioned against an Iterable of values and these fields", "method for creating an Operator object with some symbols. Args: *symbols (str): The", "Check if the release is a doujin. | | TYPE | [str][] |", "alphabet. 
| | ORIG_LANG | [str][] | *SOME* | Filter using the original", "and these fields yield an iterable of objects which match the values from", "TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\")", "Supported | Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN | [int][] | *ALL* | Find", "| Filter using an array of the original languages of the VN. |", "= AID SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class QuoteCondition(BaseCondition): \"\"\"", "the above. :) Tip: `Field Value Type` means the type of value against", "PLATFORMS. | | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter", "`>`, `<`, `>=`, `<=`) are supported. `SOME` means only operators (`==`, `!=`) are", "conditioned. Tip: All `X_ARRAY` fields must be conditioned against an Iterable of values", "| Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE", "| Find VNs using an array of tags. \"\"\" # noqa: E501 TITLE:", "| Description | |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME | [str][] | *SOME + (%)* |", "| date | *ALL* | Filter using the release date of the VN.", "operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\", operator=Operator(\"=\")", "| TYPE | [str][] | *SOME* | Filter using type of producer. |", "ID array. 
| | TRAITS | [int][] | *SOME* | Find characters by", "This object is not meant to be created by users. \"\"\" __slots__ =", "*symbols) @classmethod def fill_all(cls, *symbols: str) -> Operator: \"\"\" A factory method for", "return cls(\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\", *symbols) class BaseCondition: \"\"\" A base", "operator. Attributes: symbols (t.Tuple[str]): The symbols of the operator. \"\"\" self.symbols = symbols", "USERNAME | [str][] | *SOME + (%)* | Find user by their username.", "array. | | PRODUCER | [int][] | *(==)* | Find releases linked to", "| [bool][] | *(==)* | Check if the release is a doujin. |", "Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE | [str][] | *SOME + (%)*", "yield an iterable of objects which match the values from the API. |", "| Find the release using the original/official title. (`%` operation not supported for", "of tags. \"\"\" # noqa: E501 TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") )", "the given producer ID. | | TITLE | [str][] | *SOME + (%)*", "an array of tags. \"\"\" # noqa: E501 TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\",", "novel IDs in the array. | | PRODUCER | [int][] | *(==)* |", "all the releases linked to the given visual novel IDs in the array.", "| *ALL* | Find releases linked to the given visual novel ID. |", "A base class storing the comman condition attributes. Tip: `ALL` below means all", "with `None`. | | TYPE | [str][] | *SOME* | Filter using type", "an array of `ID`s.| \"\"\" # noqa: E501 ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all())", "original/official name of the producer. Can't use `%` with `None`. | | TYPE", "using a `None` value for `RELEASED`. 
| | RELEASED_DATE | [datetime.date][] | *ALL*", "Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|------------------------------------------| | VN | [int][]", "t.Final[_ConditionProxy] = _ConditionProxy( \"id\", operator=Operator.fill_some() ) __slots__ = () class VNCondition(BaseCondition): \"\"\" A", "CharacterCondition(BaseCondition): \"\"\" A class storing all the attributes `Character` type supports as condition.", "[int][] | ALL | Filter using an `ID` | | ID_ARRAY | A", "| |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME | [str] | *SOME + (%)* | Find using", "= _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) SEARCH:", "`BaseCondition`. \"\"\" # noqa: E501 __slots__ = () class UserCondition(BaseCondition): \"\"\" A class", "novel IDs. | | LABEL | [int][] | *(==)* | Label assigned to", "Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE | [str][] | *SOME + (%)* | Filter", "all the attributes `Ulist` type supports as condition. Hint: Check the `UlistLabelsCondition` class", "name, original and aliases fields. | | VN | [int][] | *(==)* |", "| Filter using the first character of the VN or None to match", "StaffCondition, \"quote\": QuoteCondition, \"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition, } cls = condition_map[", "operator. 
\"\"\" self.symbols = symbols @classmethod def fill_some(cls, *symbols: str) -> Operator: \"\"\"", "storing all the attributes `Ulist` type supports as condition. Hint: Check the `UlistLabelsCondition`", "Operations Supported | Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID | [int][] | *(==)* |", "using language of producer. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s |", "\"released\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy] =", "using type of producer. | | LANGUAGE | [str][] | *SOME* | Filter", "SEARCH | [str][] | *(%)* | Search for the VN using it's title", "\"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] = VN", "[str][] | *SOME + (%)* | Find using name of producer. | |", "PRODUCER | [int][] | *(==)* | Find releases linked to the given producer", "*(==)* | Find using user ID. The special value '0' is recognized as", "-> Operator: \"\"\" A factory method for creating an Operator object with some", "Field. 
| | PLATFORMS | [None][] or [str][] | *SOME* | Filter using", "| A [typing.Iterable][] of [int][]s | *SOME* | Find all the releases linked", "_ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy]", "ALL | Filter using an `ID` | | ID_ARRAY | A [typing.Iterable][] of", "\"original\", operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\"))", "t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\", operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy] =", "filters of `BaseCondition`. \"\"\" # noqa: E501 __slots__ = () class UserCondition(BaseCondition): \"\"\"", "`%` with `None`. | | TYPE | [str][] | *SOME* | Filter using", "| *SOME* | Find VNs using an array of tags. \"\"\" # noqa:", "Search for the VN using it's title and releases. | | TAGS |", "= () class ProducerCondition(BaseCondition): \"\"\" A class storing all the attributes `Producer` type", "Filter using the release date of the VN. | | PATCH | [bool][]", "*(==)* | Check if the release is a freeware. | | DOUJIN |", "Filter using the TITLE Field. | | PLATFORMS | [None][] or [str][] |", "| | LABEL | [int][] | *(==)* | Label assigned to the VN.", "for more information. Info: This one only supports `ID` and `ID_ARRAY` filters of", "| TRAITS | [int][] | *SOME* | Find characters by trait. | |", "(%)|` supports (`==`, `!=`, `%`) operators. If there is neither `ALL` nor `SOME`", "\"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS __slots__ = () class StaffCondition(BaseCondition): \"\"\"", "the operator. Returns: Operator: The created Operator object. 
Info: This method fills the", "_ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ = () def _condition_selector( type: t.Type[T], ): condition_map = {", "= _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) LANGUAGE:", "operator. Returns: Operator: The created Operator object. Info: This method fills the `=`", "is available in. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME*", "| Filter using an array of PLATFORMS. | | PLATFORMS_ARRAY | A [typing.Iterable][]", "characters linked to the given visual novel ID. | | VN_ARRAY | A", "should be conditioned. Tip: All `X_ARRAY` fields must be conditioned against an Iterable", "| Field Value Type | Operations Supported | Description | |----------|-----------------------------------|----------------------|--------------------------------| | ID", "| [None][] or [str][] | *SOME* | Filter using the language, the VN", "| *SOME + (%)* | Find using original/official name of the character. Can't", "| Attribute | Field Value Type | Operations Supported | Description | |----------------|-----------------------------------|----------------------|----------------------------------------|", "|` supports (`==`, `!=`, `>`, `<`, `>=`, `<=`) operators. `|BaseCondition.ID_ARRAY| SOME |` supports", "and aliases fields. | \"\"\" # noqa: E501 AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\"))", "an alphabet. | | ORIG_LANG | [str][] | *SOME* | Filter using the", "supports as condition. Hint: Check the `BaseCondition` class for more information. | Attribute", "`>=`, `<=`) are supported. `SOME` means only operators (`==`, `!=`) are supported. `SOME", "\"!=\", \">\", \">=\", \"<\", \"<=\", *symbols) class BaseCondition: \"\"\" A base class storing", "VN. 
| \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy]", "visual novel ID. | | VN_ARRAY | A [typing.Iterable][] of [int][]s | *SOME*", "= (\"symbols\",) def __init__(self, *symbols: str) -> None: \"\"\" Operator constructor. Args: *symbols", "by tag. | | TAGS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* |", "\"username\", operator=Operator(\"=\") ) __slots__ = () class UlistLabelsCondition: \"\"\" A class storing all", "E501 UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\" A class storing all", "Supported | Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME | [str] | *SOME + (%)*", "*SOME + (%)* | Find user by their username. | | USERNAME_ARRAY |", "| VN_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find all the", "that are not linked to any of the given traits. | \"\"\" #", "| A [typing.Iterable][] of [str][]s | *SOME* | Filter using the array of", "|----------|-----------------------------------|----------------------|--------------------------------| | ID | [int][] | ALL | Filter using an `ID` |", "tag. | | TAGS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find", "of alias IDs. | | SEARCH | [str][] | *(%)* | Performs a", "IDs in the array. | | PRODUCER | [int][] | *(==)* | Find", "\"ulist\": UlistCondition, } cls = condition_map[ type.__name__.lower() if type != UlistLabels else \"ulist-labels\"", "TRAITS | [int][] | *SOME* | Find characters by trait. | | TRAITS_ARRAY", ") __slots__ = () class VNCondition(BaseCondition): \"\"\" A class storing all the attributes", "character. Can't use `%` with `None`. | | SEARCH | [str][] | *(%)*", "above. 
:) Tip: `Field Value Type` means the type of value against which", "noqa: E501 __slots__ = () class UserCondition(BaseCondition): \"\"\" A class storing all the", "*SOME* | Filter using an array of PLATFORMS. | \"\"\" # noqa: E501", "[bool][] | *(==)* | Check if the release is a doujin. | |", "only (`==`, `!=`) operators. `|UserCondition.USERNAME| SOME + (%)|` supports (`==`, `!=`, `%`) operators.", "user using an array of usernames. | \"\"\" # noqa: E501 USERNAME: t.Final[_ConditionProxy]", "|-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN | [int][] | *ALL* | Find releases linked to the", "class storing all the attributes `Producer` type supports as condition. Hint: Check the", "= _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) RELEASED:", "patch. | | FREEWARE | [bool][] | *(==)* | Check if the release", "= _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy] =", "of languages, the VN is available in. | | FIRST_CHAR | [None][] or", "match the values from the API. | Field | Field Value Type |", "class storing the comman condition attributes. Tip: `ALL` below means all operators (`==`,", "| Attribute | Field Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|------------------------------------------|", "`User` type supports as condition. Hint: Check the `BaseCondition` class for more information.", "all the attributes `Release` type supports as condition. Hint: Check the `BaseCondition` class", "array of alias IDs. | | SEARCH | [str][] | *(%)* | Performs", "user. 
| \"\"\" # noqa: E501 UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition):", "VN. | | ORIG_LANG_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter", "username. | | USERNAME_ARRAY | A [typing.Iterable][] of [str][]s | *(==)* | Find", "| [int][] | *(==)* | Find releases linked to the given producer ID.", "user ID. The special value '0' is recognized as the currently logged in", "operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy]", "LANGUAGE SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class CharacterCondition(BaseCondition): \"\"\" A", "is specified, then that means only that operator is supported. I hope you", "noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy(", "the original language of the VN. | | ORIG_LANG_ARRAY | A [typing.Iterable][] of", "the values from the API. 
| Field | Field Value Type | Operations", "= _ConditionProxy( \"producer\", operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL:", "cls = condition_map[ type.__name__.lower() if type != UlistLabels else \"ulist-labels\" ] return cls", "_ConditionProxy( \"freeware\", operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] = _ConditionProxy(", "class UlistCondition(UlistLabelsCondition): \"\"\" A class storing all the attributes `Ulist` type supports as", "operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES", "for creating an Operator object with some symbols. Args: *symbols (str): The additional", "which the field should be conditioned. Tip: All `X_ARRAY` fields must be conditioned", "original/official name of the character. Can't use `%` with `None`. | | SEARCH", "The created Operator object. Info: This method fills the `=` and `!=` symbols.", "`!=`, `%`) operators. If there is neither `ALL` nor `SOME` in the condition", "of PLATFORMS. | | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* |", "[int][] | *(==)* | Find staff by alias ID. 
| | AID_ARRAY |", "E501 TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\",", "Value Type | Operations Supported | Description | |----------|-----------------------------------|----------------------|--------------------------------| | ID | [int][]", ") CATALOG: t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\", operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\",", "| LANGUAGES | [str][] | *SOME* | Filter using the language, the release", "| *SOME* | Filter using the array of languages, the release is available", "_ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"id\", operator=Operator.fill_some() ) __slots__ = () class", "[str][] | *SOME* | Filter using the language, the VN is available in.", "the currently logged in user. | \"\"\" # noqa: E501 UID: t.Final[_ConditionProxy] =", "_ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\",", "| *SOME + (%)* | Find the release using the original/official title. (`%`", "| [str][] | *SOME + (%)* | Find the release using the title.", "is neither `ALL` nor `SOME` in the condition but an operator is specified,", "are linked to any (not all) of the given traits, the `!=` filter", "\"<=\", *symbols) class BaseCondition: \"\"\" A base class storing the comman condition attributes.", "`Character` type supports as condition. Hint: Check the `BaseCondition` class for more information.", "[None][] or [str][] | *SOME + (%)* | Find the release using the", "of `BaseCondition`. \"\"\" # noqa: E501 __slots__ = () class UserCondition(BaseCondition): \"\"\" A", "A class storing all the attributes `Character` type supports as condition. 
Hint: Check", "attributes `Producer` type supports as condition. Hint: Check the `BaseCondition` class for more", "*(==)* | Check if the release is a patch. | | FREEWARE |", "| | ORIG_LANG_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using", "= _ConditionProxy( \"gtin\", operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\", operator=Operator.fill_some() ) LANGUAGES:", "\"release\": ReleaseCondition, \"producer\": ProducerCondition, \"character\": CharacterCondition, \"staff\": StaffCondition, \"quote\": QuoteCondition, \"user\": UserCondition, \"ulist-labels\":", "\"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", ) class", "operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] = AID SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = ()", "ProducerCondition(BaseCondition): \"\"\" A class storing all the attributes `Producer` type supports as condition.", "# noqa: E501 AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] = AID SEARCH:", "inherit from `BaseCondition` and doesn't have `ID` and `ID_ARRAY` filters. | Attribute |", "Find using original/official name of the character. Can't use `%` with `None`. |", "| [int][] | *SOME* | Find VNs by tag. 
| | TAGS_ARRAY |", "not supported for `None`) | | RELEASED | [None][] | *SOME* | Filter", "or [str][] | *SOME + (%)* | Find using original/official name of the", ") LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES PLATFORMS:", "character of the VN or None to match all the vn not starting", ") PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS RELEASED:", "user by their username. | | USERNAME_ARRAY | A [typing.Iterable][] of [str][]s |", "Field | Field Value Type | Operations Supported | Description | |----------|-----------------------------------|----------------------|--------------------------------| |", "t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS RELEASED: t.Final[_ConditionProxy] =", "operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy] = _ConditionProxy(", "ProducerCondition, \"character\": CharacterCondition, \"staff\": StaffCondition, \"quote\": QuoteCondition, \"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition,", "*symbols: str) -> Operator: \"\"\" A factory method for creating an Operator object", "import typing as t from .proxy import _ConditionProxy from ..objects import UlistLabels if", "fields must be conditioned against an Iterable of values and these fields yield", "t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] = VN TRAITS: t.Final[_ConditionProxy] = _ConditionProxy( \"traits\",", "| Find characters linked to the given visual novel ID array. 
| |", "# noqa: E501 USERNAME: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy] =", "Filter using the JAN/UPC/EAN code. | | CATALOG | [str][] | *SOME* |", "CATALOG | [str][] | *SOME* | Filter using the Catalog number. | |", "will return chars that are linked to any (not all) of the given", "\"UlistCondition\", \"_condition_selector\", ) class Operator: \"\"\" An object for storing operators for XCondition", "creating an Operator object with all symbols. Args: *symbols (str): The additional symbols", "Operator object. Info: This method fills the `=` and `!=` symbols. \"\"\" return", "object. Info: This method fills the `=` and `!=` symbols. \"\"\" return cls(\"=\",", "| USERNAME | [str][] | *SOME + (%)* | Find user by their", "using an array of languages of producer. | | SEARCH | [str][] |", "= () class UserCondition(BaseCondition): \"\"\" A class storing all the attributes `User` type", "additional symbols of the operator. Returns: Operator: The created Operator object. Info: This", "it's title and releases. | | TAGS | [int][] | *SOME* | Find", "| | TRAITS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | The `=`", "cls(\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\", *symbols) class BaseCondition: \"\"\" A base class", "| *(==)* | Find releases linked to the given producer ID. | |", "linked to any (not all) of the given traits, the `!=` filter will", "and `X` operators are supported. For example: `|BaseCondition.ID| ALL |` supports (`==`, `!=`,", "the attributes `Character` type supports as condition. Hint: Check the `BaseCondition` class for", "Find the release using the title. 
| | ORIGINAL | [None][] or [str][]", "\"vn\", operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ = () def _condition_selector(", "LANGUAGES | [str][] | *SOME* | Filter using the language, the release is", "| *SOME* | Filter using an array of languages of producer. | |", "of the operator. Attributes: symbols (t.Tuple[str]): The symbols of the operator. \"\"\" self.symbols", ") TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\",", "as condition. Info: This class doesn't inherit from `BaseCondition` and doesn't have `ID`", "[datetime.date][] | *ALL* | Filter using the release date of the VN. |", "of PLATFORMS. | \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY:", "| *SOME + (%)* | Find user by their username. | | USERNAME_ARRAY", "[typing.Iterable][] of [str][]s | *SOME* | Filter using an array of languages of", "fill_some(cls, *symbols: str) -> Operator: \"\"\" A factory method for creating an Operator", "`<`, `>=`, `<=`) are supported. `SOME` means only operators (`==`, `!=`) are supported.", "+ (%)* | Find using original/official name of the character. Can't use `%`", "using an array of the original languages of the VN. | | SEARCH", "is supported. I hope you understand the above. :) Tip: `Field Value Type`", "[str][] | *SOME* | Filter using language of producer. | | LANGUAGES_ARRAY |", "t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\" A class storing all the attributes", "| Field | Field Value Type | Operations Supported | Description | |----------|-----------------------------------|----------------------|--------------------------------|", "of producer. | | ORIGINAL | [None][] or [str][] | *SOME + (%)*", "using name of producer. 
| | ORIGINAL | [None][] or [str][] | *SOME", "(%)* | Find using original/official name of the producer. Can't use `%` with", "object with all symbols. Args: *symbols (str): The additional symbols of the operator.", "t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] =", "operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ = () def _condition_selector( type:", "for the VN using it's title and releases. | | TAGS | [int][]", "| | VN_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find using", "a patch. | | FREEWARE | [bool][] | *(==)* | Check if the", "+ (%)* | Find using name of producer. | | ORIGINAL | [None][]", "import T __all__ = ( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\",", "aliases fields. | \"\"\" # noqa: E501 AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY:", "type supports as condition. Info: This class doesn't inherit from `BaseCondition` and doesn't", "[int][] | *(==)* | Label assigned to the VN. | \"\"\" # noqa:", "by users. \"\"\" __slots__ = (\"symbols\",) def __init__(self, *symbols: str) -> None: \"\"\"", "using original/official name of the producer. Can't use `%` with `None`. | |", "type of value against which the field should be conditioned. 
Tip: All `X_ARRAY`", "t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") )", "*SOME* | Filter using an array of the original languages of the VN.", "\"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy(", "against an Iterable of values and these fields yield an iterable of objects", "class storing all the attributes `VN` type supports as condition. Hint: Check the", "and aliases fields. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\",", "the release is available in. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s", "date of the VN. | | LANGUAGES | [None][] or [str][] | *SOME*", "of the operator. Returns: Operator: The created Operator object. Info: This method fills", "of release. | | GTIN | [int][] | *SOME* | Filter using the", "| | DOUJIN | [bool][] | *(==)* | Check if the release is", "| Attribute | Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------|", "TRAITS __slots__ = () class StaffCondition(BaseCondition): \"\"\" A class storing all the attributes", "def fill_all(cls, *symbols: str) -> Operator: \"\"\" A factory method for creating an", "| LABEL | [int][] | *(==)* | Label assigned to the VN. |", "from __future__ import annotations import typing as t from .proxy import _ConditionProxy from", "class storing all the attributes `Ulist` type supports as condition. 
Hint: Check the", "A [typing.Iterable][] of [int][]s | *SOME* | Find using an array of visual", "the `!=` filter will return chars that are not linked to any of", "= _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"id\", operator=Operator.fill_some() ) __slots__ = ()", "Field Value Type | Operations Supported | Description | |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME |", "have `ID` and `ID_ARRAY` filters. | Attribute | Field Value Type | Operations", "languages of producer. | | SEARCH | [str][] | *(%)* | Performs a", "API. | Field | Field Value Type | Operations Supported | Description |", "using the Catalog number. | | LANGUAGES | [str][] | *SOME* | Filter", "from the API. | Field | Field Value Type | Operations Supported |", "LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy]", "= () class ReleaseCondition(BaseCondition): \"\"\" A class storing all the attributes `Release` type", "Args: *symbols (str): The symbols of the operator. Attributes: symbols (t.Tuple[str]): The symbols", "array of visual novel IDs. | | LABEL | [int][] | *(==)* |", "Find VNs using an array of tags. \"\"\" # noqa: E501 TITLE: t.Final[_ConditionProxy]", "VN is available in. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s |", "producer ID. 
| | TITLE | [str][] | *SOME + (%)* | Find", "\"\"\" # noqa: E501 USERNAME: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy]", "= PLATFORMS RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy(", "nor `SOME` in the condition but an operator is specified, then that means", "[str][] | *SOME* | Filter using the type of release. | | GTIN", "producer. | | ORIGINAL | [None][] or [str][] | *SOME + (%)* |", "*ALL* | Filter using the release date of the VN. | | PATCH", "| Operations Supported | Description | |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME | [str][] | *SOME", "(%)* | Find using original/official name of the character. Can't use `%` with", "| PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using an", "operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG", "= symbols @classmethod def fill_some(cls, *symbols: str) -> Operator: \"\"\" A factory method", "\"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", ) class Operator: \"\"\" An object for", "the VN. | | SEARCH | [str][] | *(%)* | Search for the", "all operators (`==`, `!=`, `>`, `<`, `>=`, `<=`) are supported. `SOME` means only", "*(==)* | Find characters linked to the given visual novel ID. | |", "Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS __slots__ = () class ReleaseCondition(BaseCondition): \"\"\" A class", "by trait. | | TRAITS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* |", "VN. 
| | PATCH | [bool][] | *(==)* | Check if the release", "LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ = () def _condition_selector( type: t.Type[T], ):", "def _condition_selector( type: t.Type[T], ): condition_map = { \"vn\": VNCondition, \"release\": ReleaseCondition, \"producer\":", "in. | | PLATFORMS | [str][] | *SOME* | Filter using an array", "attributes `Staff` type supports as condition. Hint: Check the `BaseCondition` class for more", "\"!=\", *symbols) @classmethod def fill_all(cls, *symbols: str) -> Operator: \"\"\" A factory method", "\"\"\" # noqa: E501 __slots__ = () class UserCondition(BaseCondition): \"\"\" A class storing", "= _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY:", "symbols of the operator. Attributes: symbols (t.Tuple[str]): The symbols of the operator. \"\"\"", "ID. | | VN_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* | Find", "(%)* | Find using name of producer. | | ORIGINAL | [None][] or", "releases. | | TAGS | [int][] | *SOME* | Find VNs by tag.", "[typing.Iterable][] of [str][]s | *SOME* | Filter using an array of PLATFORMS. |", "The `=` filter will return chars that are linked to any (not all)", "given producer ID. | | TITLE | [str][] | *SOME + (%)* |", "(not all) of the given traits, the `!=` filter will return chars that", "Returns: Operator: The created Operator object. Info: This method fills the `=`, `!=`,", "\"\"\" A factory method for creating an Operator object with some symbols. 
Args:", "_ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy]", "_ConditionProxy( \"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS __slots__ = () class StaffCondition(BaseCondition):", "\"\"\" A class storing all the attributes `VN` type supports as condition. Hint:", "| Attribute | Field Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------|", "storing all the attributes `UlistLabels` type supports as condition. Info: This class doesn't", "producer. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter", "means `SOME` and `X` operators are supported. For example: `|BaseCondition.ID| ALL |` supports", "`!=`, `>`, `<`, `>=`, `<=` symbols. \"\"\" return cls(\"=\", \"!=\", \">\", \">=\", \"<\",", "__slots__ = () class UlistLabelsCondition: \"\"\" A class storing all the attributes `UlistLabels`", "Find staff by alias ID. | | AID_ARRAY | A [typing.Iterable][] of [int][]s", "[str][]s | *SOME* | Filter using the array of languages, the release is", "| *ALL* | Find by visual novel ID. | | VN_ARRAY | A", "(%)* | Find the release using the original/official title. (`%` operation not supported", "factory method for creating an Operator object with all symbols. 
Args: *symbols (str):", "CATALOG: t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\", operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some()", ".proxy import _ConditionProxy from ..objects import UlistLabels if t.TYPE_CHECKING: from ..interface import T", "\"date\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy] =", "+ (%)|` supports (`==`, `!=`, `%`) operators. If there is neither `ALL` nor", "\"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition, } cls = condition_map[ type.__name__.lower() if type", "+ (%)* | Find the release using the original/official title. (`%` operation not", "an array of usernames. | \"\"\" # noqa: E501 USERNAME: t.Final[_ConditionProxy] = _ConditionProxy(", "releases linked to the given visual novel ID. | | VN_ARRAY | A", "| \"\"\" # noqa: E501 USERNAME: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY:", "the language, the VN is available in. | | LANGUAGES_ARRAY | A [typing.Iterable][]", "supports as condition. 
Info: This class doesn't inherit from `BaseCondition` and doesn't have", "operator=Operator(\"~\")) __slots__ = () def _condition_selector( type: t.Type[T], ): condition_map = { \"vn\":", "\"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] =", "t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy]", "if t.TYPE_CHECKING: from ..interface import T __all__ = ( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\",", "The symbols of the operator. Attributes: symbols (t.Tuple[str]): The symbols of the operator.", "the original/official title. (`%` operation not supported for `None`) | | RELEASED |", "trait. | | TRAITS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | The", "\"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy] =", "t.Final[_ConditionProxy] = TAGS __slots__ = () class ReleaseCondition(BaseCondition): \"\"\" A class storing all", "the name, original and aliases fields. | | VN | [int][] | *(==)*", "| *SOME + (%)* | Find using name of producer. | | ORIGINAL", "FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\", operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\", operator=Operator.fill_some()", "| | GTIN | [int][] | *SOME* | Filter using the JAN/UPC/EAN code.", "to the given visual novel ID array. | | TRAITS | [int][] |", "as the currently logged in user. | \"\"\" # noqa: E501 UID: t.Final[_ConditionProxy]", "[str][] | *(%)* | Search for the VN using it's title and releases.", "array of languages, the release is available in. 
| | PLATFORMS | [str][]", "\"released\", operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] =", "*symbols: str) -> None: \"\"\" Operator constructor. Args: *symbols (str): The symbols of", "[str][] | *SOME* | Filter using the first character of the VN or", "[str][] | *SOME* | Filter using the original language of the VN. |", "| | LANGUAGE | [str][] | *SOME* | Filter using language of producer.", "[typing.Iterable][] of [int][]s | *SOME* | The `=` filter will return chars that", "t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS __slots__ = ()", "| Filter using the TITLE Field. | | PLATFORMS | [None][] or [str][]", "the condition but an operator is specified, then that means only that operator", "| Filter using the Catalog number. | | LANGUAGES | [str][] | *SOME*", "of the VN. | | PATCH | [bool][] | *(==)* | Check if", "of visual novel IDs. | | LABEL | [int][] | *(==)* | Label", "languages, the release is available in. | | PLATFORMS | [str][] | *SOME*", "_ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator(\"=\") ) __slots__ =", "visual novel IDs in the array. | | PRODUCER | [int][] | *(==)*", "VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"vn\", operator=Operator.fill_some() ) LABEL:", ") ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\"))", "operator is specified, then that means only that operator is supported. I hope", "the given visual novel ID array. | | TRAITS | [int][] | *SOME*", "code. 
| | CATALOG | [str][] | *SOME* | Filter using the Catalog", "| [str][] | *SOME* | Filter using the language, the release is available", "Can't use `%` with `None`. | | SEARCH | [str][] | *(%)* |", "novel ID. | | VN_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* |", "in the condition but an operator is specified, then that means only that", "[None][] or [str][] | *SOME* | Filter using the PLATFORMS field. | |", "t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"vn\", operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy]", "<filename>azaka/commands/condition.py<gh_stars>10-100 from __future__ import annotations import typing as t from .proxy import _ConditionProxy", "\"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS __slots__ = () class ProducerCondition(BaseCondition): \"\"\"", "| \"\"\" # noqa: E501 UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\"", "of the character. Can't use `%` with `None`. | | SEARCH | [str][]", "\"quote\": QuoteCondition, \"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition, } cls = condition_map[ type.__name__.lower()", "to be created by users. \"\"\" __slots__ = (\"symbols\",) def __init__(self, *symbols: str)", "ORIG_LANG_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using an array", "`<=` symbols. \"\"\" return cls(\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\", *symbols) class BaseCondition:", "t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_all() )", "using name of character. | | ORIGINAL | [None][] or [str][] | *SOME", "| Filter using the original language of the VN. 
| | ORIG_LANG_ARRAY |", "= LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\", operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy(", "operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS", "object with some symbols. Args: *symbols (str): The additional symbols of the operator.", "for `None`) | | RELEASED | [None][] | *SOME* | Filter using a", "| *(%)* | Performs a search on the name, original and aliases fields.", "|-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME | [str][] | *SOME + (%)* | Find using name", "class storing all the attributes `User` type supports as condition. Hint: Check the", "storing all the attributes `Release` type supports as condition. Hint: Check the `BaseCondition`", "= _ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\" A class storing all the attributes `Ulist`", "_ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS __slots__ = () class ReleaseCondition(BaseCondition): \"\"\" A", "Attribute | Field Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| |", "Performs a search on the name, original and aliases fields. | | VN", "the array of languages, the release is available in. | | PLATFORMS |", "value for `RELEASED`. | | RELEASED_DATE | date | *ALL* | Filter using", "only that operator is supported. I hope you understand the above. 
:) Tip:", "[typing.Iterable][] of [str][]s | *SOME* | Filter using the array of languages, the", "-> Operator: \"\"\" A factory method for creating an Operator object with all", "information. | Attribute | Field Value Type | Operations Supported | Description |", "the character. Can't use `%` with `None`. | | SEARCH | [str][] |", "\"\"\" A base class storing the comman condition attributes. Tip: `ALL` below means", "languages, the VN is available in. | | FIRST_CHAR | [None][] or [str][]", "of languages of producer. | | SEARCH | [str][] | *(%)* | Performs", "Filter using the Catalog number. | | LANGUAGES | [str][] | *SOME* |", "date of the VN. | | PATCH | [bool][] | *(==)* | Check", "aliases fields. | | VN | [int][] | *(==)* | Find characters linked", "means only that operator is supported. I hope you understand the above. :)", "condition_map = { \"vn\": VNCondition, \"release\": ReleaseCondition, \"producer\": ProducerCondition, \"character\": CharacterCondition, \"staff\": StaffCondition,", "`Producer` type supports as condition. Hint: Check the `BaseCondition` class for more information.", "USERNAME_ARRAY | A [typing.Iterable][] of [str][]s | *(==)* | Find user using an", "NAME | [str][] | *SOME + (%)* | Find using name of producer.", "| Find staff by an array of alias IDs. | | SEARCH |", "users. \"\"\" __slots__ = (\"symbols\",) def __init__(self, *symbols: str) -> None: \"\"\" Operator", "[int][] | *SOME* | Filter using the JAN/UPC/EAN code. | | CATALOG |", "IDs. | | LABEL | [int][] | *(==)* | Label assigned to the", "__future__ import annotations import typing as t from .proxy import _ConditionProxy from ..objects", "is a patch. | | FREEWARE | [bool][] | *(==)* | Check if", "operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\",", "the VN. 
| \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY:", "Check the `BaseCondition` class for more information. | Attribute | Field Value Type", "| Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME | [str] | *SOME + (%)* |", "Filter using the array of languages, the VN is available in. | |", "| [str][] | *SOME* | Filter using type of producer. | | LANGUAGE", "an array of PLATFORMS. | | RELEASED | [None][] | *SOME* | Filter", "Filter using the language, the VN is available in. | | LANGUAGES_ARRAY |", "..interface import T __all__ = ( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\",", "all the attributes `User` type supports as condition. Hint: Check the `BaseCondition` class", "\"date\", operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\",", "all the attributes `UlistLabels` type supports as condition. Info: This class doesn't inherit", "E501 AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] = AID SEARCH: t.Final[_ConditionProxy] =", "`!=`, `>`, `<`, `>=`, `<=`) are supported. `SOME` means only operators (`==`, `!=`)", "| Label assigned to the VN. | \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy]", "\"\"\" A class storing all the attributes `Release` type supports as condition. Hint:", "*(%)* | Search for the VN using it's title and releases. | |", "| \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL:", "name of producer. 
| | ORIGINAL | [None][] or [str][] | *SOME +", "| Field Value Type | Operations Supported | Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME", "for XCondition attributes to check condition support. Warning: This object is not meant", ") TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS __slots__ = () class StaffCondition(BaseCondition): \"\"\" A class", "| ORIGINAL | [None][] or [str][] | *SOME + (%)* | Find using", "| Filter using the array of languages, the release is available in. |", "AID_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* | Find staff by an", "_ConditionProxy( \"type\", operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\", operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy]", "Filter using an array of languages of producer. 
| | SEARCH | [str][]", "LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy]", "t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\", operator=Operator.fill_some() )", "operator=Operator(\"~\")) __slots__ = () class CharacterCondition(BaseCondition): \"\"\" A class storing all the attributes", "`None`) | | RELEASED | [None][] | *SOME* | Filter using a `None`", "Tip: All `X_ARRAY` fields must be conditioned against an Iterable of values and", "ReleaseCondition, \"producer\": ProducerCondition, \"character\": CharacterCondition, \"staff\": StaffCondition, \"quote\": QuoteCondition, \"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition,", "Attribute | Field Value Type | Operations Supported | Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| |", "class CharacterCondition(BaseCondition): \"\"\" A class storing all the attributes `Character` type supports as", "the given traits. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\",", "supported. For example: `|BaseCondition.ID| ALL |` supports (`==`, `!=`, `>`, `<`, `>=`, `<=`)", "Hint: Check the `BaseCondition` class for more information. | Attribute | Field Value", "[int][] | *SOME* | Find VNs by tag. | | TAGS_ARRAY | A", "| | TYPE | [str][] | *SOME* | Filter using type of producer.", "means only operators (`==`, `!=`) are supported. `SOME + X` means `SOME` and", "fill_all(cls, *symbols: str) -> Operator: \"\"\" A factory method for creating an Operator", "the producer. Can't use `%` with `None`. | | TYPE | [str][] |", "the release is available in. | | PLATFORMS | [str][] | *SOME* |", "release is a doujin. 
| | TYPE | [str][] | *SOME* | Filter", "symbols @classmethod def fill_some(cls, *symbols: str) -> Operator: \"\"\" A factory method for", "symbols of the operator. \"\"\" self.symbols = symbols @classmethod def fill_some(cls, *symbols: str)", "_ConditionProxy( \"date\", operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy(", "str) -> None: \"\"\" Operator constructor. Args: *symbols (str): The symbols of the", "the VN using it's title and releases. | | TAGS | [int][] |", "(%)* | Find using name of character. | | ORIGINAL | [None][] or", "UlistCondition(UlistLabelsCondition): \"\"\" A class storing all the attributes `Ulist` type supports as condition.", "`=`, `!=`, `>`, `<`, `>=`, `<=` symbols. \"\"\" return cls(\"=\", \"!=\", \">\", \">=\",", "comman condition attributes. Tip: `ALL` below means all operators (`==`, `!=`, `>`, `<`,", "operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] = VN TRAITS: t.Final[_ConditionProxy] =", "t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\", operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() )", "supports (`==`, `!=`, `%`) operators. If there is neither `ALL` nor `SOME` in", "= _ConditionProxy( \"type\", operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy( \"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY:", "Type | Operations Supported | Description | |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME | [str][] |", "supports `ID` and `ID_ARRAY` filters of `BaseCondition`. 
\"\"\" # noqa: E501 __slots__ =", "Operations Supported | Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID | [int][] | *(==)* |", "the `BaseCondition` class for more information. Info: This one only supports `ID` and", "(t.Tuple[str]): The symbols of the operator. \"\"\" self.symbols = symbols @classmethod def fill_some(cls,", "\"firstchar\", operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy] =", "symbols. Args: *symbols (str): The additional symbols of the operator. Returns: Operator: The", "*SOME* | Filter using an array of PLATFORMS. | | PLATFORMS_ARRAY | A", "an array of alias IDs. | | SEARCH | [str][] | *(%)* |", "NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\")", "noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] = VN PRODUCER: t.Final[_ConditionProxy]", "RELEASED_DATE | date | *ALL* | Filter using the release date of the", "Operations Supported | Description | |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME | [str][] | *SOME +", "| | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using", "| VN_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find using an", "The symbols of the operator. \"\"\" self.symbols = symbols @classmethod def fill_some(cls, *symbols:", "VN | [int][] | *ALL* | Find releases linked to the given visual", "Find releases linked to the given producer ID. | | TITLE | [str][]", "their username. 
| | USERNAME_ARRAY | A [typing.Iterable][] of [str][]s | *(==)* |", "t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\", operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\", operator=Operator.fill_some() )", "Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME | [str] | *SOME + (%)* | Find", "*(==)* | Check if the release is a doujin. | | TYPE |", "For example: `|BaseCondition.ID| ALL |` supports (`==`, `!=`, `>`, `<`, `>=`, `<=`) operators.", "| | PLATFORMS | [None][] or [str][] | *SOME* | Filter using the", "language, the release is available in. | | LANGUAGES_ARRAY | A [typing.Iterable][] of", "XCondition attributes to check condition support. Warning: This object is not meant to", "`BaseCondition` class for more information. | Attribute | Field Value Type | Operations", "using the type of release. | | GTIN | [int][] | *SOME* |", "using an `ID` | | ID_ARRAY | A [typing.Iterable][] of [int][]s | SOME", "| \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] =", "ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some()", "VN_ARRAY: t.Final[_ConditionProxy] = VN TRAITS: t.Final[_ConditionProxy] = _ConditionProxy( \"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy]", "`SOME + X` means `SOME` and `X` operators are supported. For example: `|BaseCondition.ID|", "| VN | [int][] | *ALL* | Find by visual novel ID. |", "created by users. 
\"\"\" __slots__ = (\"symbols\",) def __init__(self, *symbols: str) -> None:", "| |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID | [int][] | *(==)* | Find using user ID.", "A [typing.Iterable][] of [str][]s | *(==)* | Find user using an array of", "Label assigned to the VN. | \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] =", "= _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class CharacterCondition(BaseCondition): \"\"\" A class storing all", "| *SOME* | Find using an array of visual novel IDs. | |", "[int][] | *(==)* | Find characters linked to the given visual novel ID.", "= PLATFORMS __slots__ = () class ProducerCondition(BaseCondition): \"\"\" A class storing all the", "symbols (t.Tuple[str]): The symbols of the operator. \"\"\" self.symbols = symbols @classmethod def", "\"\"\" # noqa: E501 UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\" A", "() def _condition_selector( type: t.Type[T], ): condition_map = { \"vn\": VNCondition, \"release\": ReleaseCondition,", "This one only supports `ID` and `ID_ARRAY` filters of `BaseCondition`. \"\"\" # noqa:", "of the VN. | | LANGUAGES | [None][] or [str][] | *SOME* |", "| TYPE | [str][] | *SOME* | Filter using the type of release.", "*SOME + (%)* | Filter using the TITLE Field. 
| | PLATFORMS |", "\"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", ) class Operator: \"\"\" An object for storing operators", "t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] = AID SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\"))", "A [typing.Iterable][] of [str][]s | *SOME* | Filter using an array of the", "# noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] = VN PRODUCER:", "symbols. \"\"\" return cls(\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\", *symbols) class BaseCondition: \"\"\"", "None: \"\"\" Operator constructor. Args: *symbols (str): The symbols of the operator. Attributes:", "Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE | [str][] | *SOME +", "as condition. Hint: Check the `UlistLabelsCondition` class for more information. | Attribute |", "PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS RELEASED: t.Final[_ConditionProxy]", "Filter using an array of PLATFORMS. | | PLATFORMS_ARRAY | A [typing.Iterable][] of", "= () class StaffCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff` type", "|` supports only (`==`, `!=`) operators. `|UserCondition.USERNAME| SOME + (%)|` supports (`==`, `!=`,", "__slots__ = () class CharacterCondition(BaseCondition): \"\"\" A class storing all the attributes `Character`", "Find using name of character. | | ORIGINAL | [None][] or [str][] |", "| *ALL* | Filter using the release date of the VN. 
| |", "operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\" A class storing all the attributes `Ulist` type supports", "an Iterable of values and these fields yield an iterable of objects which", "assigned to the VN. | \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\",", "| LANGUAGES | [None][] or [str][] | *SOME* | Filter using the language,", "any of the given traits. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] =", "support. Warning: This object is not meant to be created by users. \"\"\"", "using an array of `ID`s.| \"\"\" # noqa: E501 ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\",", "PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS __slots__ = () class ProducerCondition(BaseCondition): \"\"\" A class storing", "Info: This method fills the `=`, `!=`, `>`, `<`, `>=`, `<=` symbols. \"\"\"", "ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\",", "= _ConditionProxy( \"vn\", operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ = ()", "understand the above. :) Tip: `Field Value Type` means the type of value", "using the title. | | ORIGINAL | [None][] or [str][] | *SOME +", "AID: t.Final[_ConditionProxy] = _ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] = AID SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\",", "This method fills the `=` and `!=` symbols. 
\"\"\" return cls(\"=\", \"!=\", *symbols)", "| ORIG_LANG | [str][] | *SOME* | Filter using the original language of", "Tip: `ALL` below means all operators (`==`, `!=`, `>`, `<`, `>=`, `<=`) are", "| ORIG_LANG_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using an", "= ORIG_LANG SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY:", "the attributes `Producer` type supports as condition. Hint: Check the `BaseCondition` class for", "`ID`s.| \"\"\" # noqa: E501 ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] =", "_ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] = VN PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy( \"producer\", operator=Operator(\"=\") )", "*SOME* | Filter using the original language of the VN. | | ORIG_LANG_ARRAY", "| [int][] | *SOME* | Filter using the JAN/UPC/EAN code. | | CATALOG", ") LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = ()", "| Filter using the PLATFORMS field. | | PLATFORMS_ARRAY | A [typing.Iterable][] of", "| [str][] | *SOME + (%)* | Find using name of producer. |", "any (not all) of the given traits, the `!=` filter will return chars", "`None` value for `RELEASED`. | | RELEASED_DATE | date | *ALL* | Filter", "ID | [int][] | ALL | Filter using an `ID` | | ID_ARRAY", "| *SOME* | Filter using type of producer. | | LANGUAGE | [str][]", "= () class QuoteCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff` type", "VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY: t.Final[_ConditionProxy] = VN TRAITS: t.Final[_ConditionProxy] = _ConditionProxy(", "`Staff` type supports as condition. 
Hint: Check the `BaseCondition` class for more information.", "type supports as condition. Hint: Check the `UlistLabelsCondition` class for more information. |", "\"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", ) class Operator: \"\"\" An object for storing", "condition. Hint: Check the `BaseCondition` class for more information. Info: This one only", "_ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS __slots__ =", "_ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class CharacterCondition(BaseCondition): \"\"\" A class storing all the", "{ \"vn\": VNCondition, \"release\": ReleaseCondition, \"producer\": ProducerCondition, \"character\": CharacterCondition, \"staff\": StaffCondition, \"quote\": QuoteCondition,", "Hint: Check the `BaseCondition` class for more information. Info: This one only supports", "| *SOME* | Filter using the array of languages, the VN is available", "| [str][] | *SOME + (%)* | Filter using the TITLE Field. |", "`!=`) are supported. `SOME + X` means `SOME` and `X` operators are supported.", "noqa: E501 TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy(", "operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\",", "array of usernames. 
| \"\"\" # noqa: E501 USERNAME: t.Final[_ConditionProxy] = _ConditionProxy( \"username\",", "_ConditionProxy( \"gtin\", operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\", operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy]", "| | AID_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* | Find staff", "Type | Operations Supported | Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID | [int][] |", "| | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using", "PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy( \"producer\", operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\")", "Iterable of values and these fields yield an iterable of objects which match", "Find using user ID. The special value '0' is recognized as the currently", "= _ConditionProxy( \"freeware\", operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] =", "class doesn't inherit from `BaseCondition` and doesn't have `ID` and `ID_ARRAY` filters. |", "| DOUJIN | [bool][] | *(==)* | Check if the release is a", "| Operations Supported | Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME | [str] | *SOME", "Tip: `Field Value Type` means the type of value against which the field", "A [typing.Iterable][] of [int][]s | *SOME* | Find VNs using an array of", "RELEASED | [None][] | *SOME* | Filter using a `None` value for `RELEASED`.", "PLATFORMS. 
| | RELEASED | [None][] | *SOME* | Filter using a `None`", "VN PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy( \"producer\", operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\",", "title. (`%` operation not supported for `None`) | | RELEASED | [None][] |", "are supported. `SOME` means only operators (`==`, `!=`) are supported. `SOME + X`", "[str][]s | *SOME* | Filter using an array of the original languages of", "_ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS __slots__ = () class ProducerCondition(BaseCondition):", "*SOME* | Filter using a `None` value for `RELEASED`. | | RELEASED_DATE |", "[typing.Iterable][] of [int][]s | *(==)* | Find staff by an array of alias", "GTIN | [int][] | *SOME* | Filter using the JAN/UPC/EAN code. | |", "| [int][] | *(==)* | Find characters linked to the given visual novel", "available in. | | PLATFORMS | [str][] | *SOME* | Filter using an", "= _ConditionProxy( \"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\",", "\"\"\" A factory method for creating an Operator object with all symbols. Args:", "_ConditionProxy( \"vn\", operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ = () def", "*SOME + (%)* | Find the release using the original/official title. (`%` operation", "= _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS __slots__ = () class", "the title. | | ORIGINAL | [None][] or [str][] | *SOME + (%)*", "a freeware. 
| | DOUJIN | [bool][] | *(==)* | Check if the", "_ConditionProxy( \"type\", operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy( \"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy]", "currently logged in user. | \"\"\" # noqa: E501 UID: t.Final[_ConditionProxy] = _ConditionProxy(\"uid\",", "\"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\", \"_condition_selector\", ) class Operator: \"\"\" An object", "object is not meant to be created by users. \"\"\" __slots__ = (\"symbols\",)", "= _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class QuoteCondition(BaseCondition): \"\"\" A class storing all", "Operations Supported | Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN | [int][] | *ALL* |", "import annotations import typing as t from .proxy import _ConditionProxy from ..objects import", "using the TITLE Field. | | PLATFORMS | [None][] or [str][] | *SOME*", "method fills the `=` and `!=` symbols. \"\"\" return cls(\"=\", \"!=\", *symbols) @classmethod", "doesn't inherit from `BaseCondition` and doesn't have `ID` and `ID_ARRAY` filters. | Attribute", "but an operator is specified, then that means only that operator is supported.", "AID_ARRAY: t.Final[_ConditionProxy] = AID SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class", "| *(==)* | Find user using an array of usernames. | \"\"\" #", "X` means `SOME` and `X` operators are supported. 
For example: `|BaseCondition.ID| ALL |`", "_ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\",", "Find characters linked to the given visual novel ID. | | VN_ARRAY |", "| Attribute | Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------|", "E501 __slots__ = () class UserCondition(BaseCondition): \"\"\" A class storing all the attributes", "ORIGINAL | [None][] or [str][] | *SOME + (%)* | Find the release", "objects which match the values from the API. | Field | Field Value", "): condition_map = { \"vn\": VNCondition, \"release\": ReleaseCondition, \"producer\": ProducerCondition, \"character\": CharacterCondition, \"staff\":", "The additional symbols of the operator. Returns: Operator: The created Operator object. Info:", "class VNCondition(BaseCondition): \"\"\" A class storing all the attributes `VN` type supports as", "Operator object with all symbols. Args: *symbols (str): The additional symbols of the", "(`==`, `!=`) operators. `|UserCondition.USERNAME| SOME + (%)|` supports (`==`, `!=`, `%`) operators. If", "class storing all the attributes `Character` type supports as condition. Hint: Check the", "Filter using language of producer. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s", "that operator is supported. I hope you understand the above. :) Tip: `Field", "array of tags. 
\"\"\" # noqa: E501 TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\")", "| Operations Supported | Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN | [int][] | *ALL*", "| *SOME* | Find all the releases linked to the given visual novel", "| *SOME* | Filter using an array of PLATFORMS. | \"\"\" # noqa:", "be conditioned. Tip: All `X_ARRAY` fields must be conditioned against an Iterable of", ") LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy( \"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE SEARCH:", "`ALL` below means all operators (`==`, `!=`, `>`, `<`, `>=`, `<=`) are supported.", "Check the `UlistLabelsCondition` class for more information. | Attribute | Field Value Type", "Filter using the type of release. | | GTIN | [int][] | *SOME*", "VN using it's title and releases. | | TAGS | [int][] | *SOME*", "storing all the attributes `VN` type supports as condition. Hint: Check the `BaseCondition`", "using the array of languages, the release is available in. | | PLATFORMS", "example: `|BaseCondition.ID| ALL |` supports (`==`, `!=`, `>`, `<`, `>=`, `<=`) operators. 
`|BaseCondition.ID_ARRAY|", "ORIGINAL | [None][] or [str][] | *SOME + (%)* | Find using original/official", "condition but an operator is specified, then that means only that operator is", "_ConditionProxy( \"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\"))", "_ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"released\",", "of [str][]s | *SOME* | Filter using an array of the original languages", "using an array of visual novel IDs. | | LABEL | [int][] |", "| *SOME* | Filter using the language, the release is available in. |", "are not linked to any of the given traits. | \"\"\" # noqa:", "| Filter using the language, the release is available in. | | LANGUAGES_ARRAY", ") class Operator: \"\"\" An object for storing operators for XCondition attributes to", "of the given traits. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy(", "| *SOME* | The `=` filter will return chars that are linked to", "\"producer\", operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] =", "None to match all the vn not starting with an alphabet. | |", "(`==`, `!=`, `>`, `<`, `>=`, `<=`) operators. `|BaseCondition.ID_ARRAY| SOME |` supports only (`==`,", "first character of the VN or None to match all the vn not", "VN is available in. | | FIRST_CHAR | [None][] or [str][] | *SOME*", "GTIN: t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\", operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\", operator=Operator.fill_some()", "fields. 
| \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") )", "be conditioned against an Iterable of values and these fields yield an iterable", "name of character. | | ORIGINAL | [None][] or [str][] | *SOME +", "| Find user by their username. | | USERNAME_ARRAY | A [typing.Iterable][] of", "| SOME | Filter using an array of `ID`s.| \"\"\" # noqa: E501", "or [str][] | *SOME* | Filter using the first character of the VN", "`>`, `<`, `>=`, `<=` symbols. \"\"\" return cls(\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\",", "\"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some()", "created Operator object. Info: This method fills the `=`, `!=`, `>`, `<`, `>=`,", "ALL |` supports (`==`, `!=`, `>`, `<`, `>=`, `<=`) operators. `|BaseCondition.ID_ARRAY| SOME |`", "the array of languages, the VN is available in. | | FIRST_CHAR |", "*SOME* | Filter using the array of languages, the release is available in.", "Filter using an array of PLATFORMS. | \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy]", "operators are supported. For example: `|BaseCondition.ID| ALL |` supports (`==`, `!=`, `>`, `<`,", "| Filter using the release date of the VN. | | PATCH |", "are supported. `SOME + X` means `SOME` and `X` operators are supported. For", "the Catalog number. | | LANGUAGES | [str][] | *SOME* | Filter using", "(`==`, `!=`, `%`) operators. If there is neither `ALL` nor `SOME` in the", "the VN is available in. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s", "_ConditionProxy(\"aid\", operator=Operator(\"=\")) AID_ARRAY: t.Final[_ConditionProxy] = AID SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ =", "VNs using an array of tags. 
\"\"\" # noqa: E501 TITLE: t.Final[_ConditionProxy] =", "[str][] | *SOME + (%)* | Find using original/official name of the producer.", "[None][] or [str][] | *SOME* | Filter using the language, the VN is", "starting with an alphabet. | | ORIG_LANG | [str][] | *SOME* | Filter", "__all__ = ( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\", \"StaffCondition\", \"QuoteCondition\", \"UserCondition\", \"UlistLabelsCondition\", \"UlistCondition\",", "| Operations Supported | Description | |----------|-----------------------------------|----------------------|--------------------------------| | ID | [int][] | ALL", "\"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\", operator=Operator.fill_some()", "title and releases. | | TAGS | [int][] | *SOME* | Find VNs", ") RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_all() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\",", "array of `ID`s.| \"\"\" # noqa: E501 ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY:", "of [str][]s | *SOME* | Filter using an array of PLATFORMS. | |", "of [int][]s | SOME | Filter using an array of `ID`s.| \"\"\" #", "traits. 
| \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") )", "| ALL | Filter using an `ID` | | ID_ARRAY | A [typing.Iterable][]", "| | ID_ARRAY | A [typing.Iterable][] of [int][]s | SOME | Filter using", "Supported | Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID | [int][] | *(==)* | Find", "Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME | [str][] | *SOME + (%)* | Find", "Find using original/official name of the producer. Can't use `%` with `None`. |", "= _ConditionProxy( \"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS __slots__ = () class", "*(==)* | Find characters linked to the given visual novel ID array. |", "original and aliases fields. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy(", "and `!=` symbols. \"\"\" return cls(\"=\", \"!=\", *symbols) @classmethod def fill_all(cls, *symbols: str)", "`Release` type supports as condition. Hint: Check the `BaseCondition` class for more information.", "operator=Operator.fill_some() ) PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS __slots__ = () class ProducerCondition(BaseCondition): \"\"\" A", "| [int][] | *(==)* | Find using user ID. The special value '0'", "t.Final[_ConditionProxy] = LANGUAGE SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class CharacterCondition(BaseCondition):", ":) Tip: `Field Value Type` means the type of value against which the", "= () class UlistLabelsCondition: \"\"\" A class storing all the attributes `UlistLabels` type", "`BaseCondition` class for more information. 
Info: This one only supports `ID` and `ID_ARRAY`", "TAGS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find VNs using an", "Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID | [int][] |", "supported. `SOME + X` means `SOME` and `X` operators are supported. For example:", "= () class VNCondition(BaseCondition): \"\"\" A class storing all the attributes `VN` type", "the `BaseCondition` class for more information. | Attribute | Field Value Type |", "of [str][]s | *SOME* | Filter using the array of languages, the release", "TITLE | [str][] | *SOME + (%)* | Filter using the TITLE Field.", "| *(==)* | Find staff by alias ID. | | AID_ARRAY | A", "and `ID_ARRAY` filters. | Attribute | Field Value Type | Operations Supported |", "LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\", operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy]", "| UID | [int][] | *(==)* | Find using user ID. The special", "\"\"\" # noqa: E501 TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) PLATFORMS: t.Final[_ConditionProxy]", "given visual novel ID array. | | TRAITS | [int][] | *SOME* |", "\"\"\" return cls(\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\", *symbols) class BaseCondition: \"\"\" A", "| [int][] | *SOME* | Find characters by trait. | | TRAITS_ARRAY |", "UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition, } cls = condition_map[ type.__name__.lower() if type !=", "novel ID array. 
| | TRAITS | [int][] | *SOME* | Find characters", "| A [typing.Iterable][] of [int][]s | *SOME* | Find VNs using an array", "PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\", operator=Operator(\"=\") ) DOUJIN:", "operator=Operator(\"~\")) __slots__ = () class QuoteCondition(BaseCondition): \"\"\" A class storing all the attributes", "\"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator(\"=\") ) __slots__ = ()", "Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN | [int][]", "RELEASED_DATE | [datetime.date][] | *ALL* | Filter using the release date of the", "+ (%)* | Find user by their username. | | USERNAME_ARRAY | A", "the VN is available in. | | FIRST_CHAR | [None][] or [str][] |", "these fields yield an iterable of objects which match the values from the", "= _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS __slots__", "import _ConditionProxy from ..objects import UlistLabels if t.TYPE_CHECKING: from ..interface import T __all__", "Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN | [int][] |", "| Filter using an array of `ID`s.| \"\"\" # noqa: E501 ID: t.Final[_ConditionProxy]", "| *SOME* | Filter using the first character of the VN or None", "value for `RELEASED`. 
| | RELEASED_DATE | [datetime.date][] | *ALL* | Filter using", "= () class CharacterCondition(BaseCondition): \"\"\" A class storing all the attributes `Character` type", "Can't use `%` with `None`. | | TYPE | [str][] | *SOME* |", "Description | |----------|-----------------------------------|----------------------|--------------------------------| | ID | [int][] | ALL | Filter using an", "|-----------|-----------------------------------|----------------------|------------------------------------------| | VN | [int][] | *ALL* | Find by visual novel ID.", "LANGUAGE | [str][] | *SOME* | Filter using language of producer. | |", "storing operators for XCondition attributes to check condition support. Warning: This object is", "PLATFORMS | [None][] or [str][] | *SOME* | Filter using the PLATFORMS field.", "[typing.Iterable][] of [int][]s | *SOME* | Find all the releases linked to the", "*ALL* | Find releases linked to the given visual novel ID. | |", "\">\", \">=\", \"<\", \"<=\", *symbols) class BaseCondition: \"\"\" A base class storing the", "`VN` type supports as condition. Hint: Check the `BaseCondition` class for more information.", "operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy] = _ConditionProxy(", "the type of value against which the field should be conditioned. Tip: All", "to the given visual novel IDs in the array. | | PRODUCER |", "you understand the above. :) Tip: `Field Value Type` means the type of", "[int][]s | *SOME* | Find all the releases linked to the given visual", "date | *ALL* | Filter using the release date of the VN. |", "() class VNCondition(BaseCondition): \"\"\" A class storing all the attributes `VN` type supports", "ID. The special value '0' is recognized as the currently logged in user.", "| *(==)* | Check if the release is a doujin. 
| | TYPE", "= _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ = () def _condition_selector( type: t.Type[T], ): condition_map =", "ID. | | AID_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* | Find", "VN_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find all the releases", "filter will return chars that are not linked to any of the given", "annotations import typing as t from .proxy import _ConditionProxy from ..objects import UlistLabels", "__slots__ = (\"symbols\",) def __init__(self, *symbols: str) -> None: \"\"\" Operator constructor. Args:", "| *SOME + (%)* | Find using original/official name of the producer. Can't", "| A [typing.Iterable][] of [int][]s | SOME | Filter using an array of", "| Filter using the JAN/UPC/EAN code. | | CATALOG | [str][] | *SOME*", "| *SOME* | Filter using the PLATFORMS field. | | PLATFORMS_ARRAY | A", "\"_condition_selector\", ) class Operator: \"\"\" An object for storing operators for XCondition attributes", "| *SOME* | Filter using the JAN/UPC/EAN code. | | CATALOG | [str][]", "Type` means the type of value against which the field should be conditioned.", "Attribute | Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| |", "[str][]s | *SOME* | Filter using an array of PLATFORMS. | | RELEASED", "or [str][] | *SOME + (%)* | Find the release using the original/official", "ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG SEARCH: t.Final[_ConditionProxy]", "PLATFORMS. 
| \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy]", "[typing.Iterable][] of [str][]s | *(==)* | Find user using an array of usernames.", "operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy(", "using a `None` value for `RELEASED`. | | RELEASED_DATE | date | *ALL*", "on the name, original and aliases fields. | \"\"\" # noqa: E501 NAME:", "Field Value Type | Operations Supported | Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME |", "[typing.Iterable][] of [int][]s | *(==)* | Find characters linked to the given visual", "of producer. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* |", "Filter using the first character of the VN or None to match all", "the release is a doujin. | | TYPE | [str][] | *SOME* |", "[None][] | *SOME* | Filter using a `None` value for `RELEASED`. | |", "| | VN_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* | Find characters", "t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ = () def _condition_selector( type: t.Type[T], ): condition_map", "for `RELEASED`. | | RELEASED_DATE | date | *ALL* | Filter using the", "| Find characters by trait. | | TRAITS_ARRAY | A [typing.Iterable][] of [int][]s", "and releases. 
| | TAGS | [int][] | *SOME* | Find VNs by", "operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\", operator=Operator.fill_some() )", "\"original\", operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy] =", "Find characters by trait. | | TRAITS_ARRAY | A [typing.Iterable][] of [int][]s |", "t.Final[_ConditionProxy] = VN TRAITS: t.Final[_ConditionProxy] = _ConditionProxy( \"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy] =", "| Find staff by alias ID. | | AID_ARRAY | A [typing.Iterable][] of", "the operator. \"\"\" self.symbols = symbols @classmethod def fill_some(cls, *symbols: str) -> Operator:", "noqa: E501 USERNAME: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy(", "operator=Operator.fill_some() ) __slots__ = () class VNCondition(BaseCondition): \"\"\" A class storing all the", "operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() )", "Find user using an array of usernames. | \"\"\" # noqa: E501 USERNAME:", "= _ConditionProxy( \"type\", operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy] = _ConditionProxy( \"gtin\", operator=Operator.fill_some() ) CATALOG:", "language of the VN. 
| | ORIG_LANG_ARRAY | A [typing.Iterable][] of [str][]s |", ") __slots__ = () class UlistLabelsCondition: \"\"\" A class storing all the attributes", "Operations Supported | Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME | [str] | *SOME +", "using the original language of the VN. | | ORIG_LANG_ARRAY | A [typing.Iterable][]", "= TAGS __slots__ = () class ReleaseCondition(BaseCondition): \"\"\" A class storing all the", "the original languages of the VN. | | SEARCH | [str][] | *(%)*", "releases linked to the given producer ID. | | TITLE | [str][] |", "_ConditionProxy( \"catalog\", operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy]", "Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|------------------------------------------| | VN | [int][] |", "Filter using the release date of the VN. | | LANGUAGES | [None][]", "on the name, original and aliases fields. | | VN | [int][] |", "self.symbols = symbols @classmethod def fill_some(cls, *symbols: str) -> Operator: \"\"\" A factory", "| | TYPE | [str][] | *SOME* | Filter using the type of", "| | FIRST_CHAR | [None][] or [str][] | *SOME* | Filter using the", "BaseCondition: \"\"\" A base class storing the comman condition attributes. Tip: `ALL` below", "| *SOME + (%)* | Filter using the TITLE Field. | | PLATFORMS", "filters. | Attribute | Field Value Type | Operations Supported | Description |", "This method fills the `=`, `!=`, `>`, `<`, `>=`, `<=` symbols. \"\"\" return", "`SOME` and `X` operators are supported. For example: `|BaseCondition.ID| ALL |` supports (`==`,", "using original/official name of the character. 
Can't use `%` with `None`. | |", "TYPE | [str][] | *SOME* | Filter using the type of release. |", "A [typing.Iterable][] of [str][]s | *SOME* | Filter using an array of languages", "t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy]", "| Performs a search on the name, original and aliases fields. | |", "+ X` means `SOME` and `X` operators are supported. For example: `|BaseCondition.ID| ALL", "given visual novel IDs in the array. | | PRODUCER | [int][] |", "| |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME | [str][] | *SOME + (%)* | Find using", "VNCondition, \"release\": ReleaseCondition, \"producer\": ProducerCondition, \"character\": CharacterCondition, \"staff\": StaffCondition, \"quote\": QuoteCondition, \"user\": UserCondition,", "VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"vn\", operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__", "the language, the release is available in. | | LANGUAGES_ARRAY | A [typing.Iterable][]", "| *(==)* | Check if the release is a patch. | | FREEWARE", "= TRAITS __slots__ = () class StaffCondition(BaseCondition): \"\"\" A class storing all the", "\"<\", \"<=\", *symbols) class BaseCondition: \"\"\" A base class storing the comman condition", "`>=`, `<=` symbols. \"\"\" return cls(\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\", *symbols) class", "| Filter using type of producer. | | LANGUAGE | [str][] | *SOME*", "linked to the given visual novel ID array. | | TRAITS | [int][]", "the JAN/UPC/EAN code. 
| | CATALOG | [str][] | *SOME* | Filter using", "= _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator(\"=\") ) __slots__", "_ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) GTIN: t.Final[_ConditionProxy] = _ConditionProxy(", "for `RELEASED`. | | RELEASED_DATE | [datetime.date][] | *ALL* | Filter using the", "of usernames. | \"\"\" # noqa: E501 USERNAME: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\")", "[str] | *SOME + (%)* | Find using name of character. | |", "using user ID. The special value '0' is recognized as the currently logged", "by their username. | | USERNAME_ARRAY | A [typing.Iterable][] of [str][]s | *(==)*", "of [int][]s | *SOME* | Find all the releases linked to the given", "which match the values from the API. | Field | Field Value Type", "| Filter using a `None` value for `RELEASED`. | | RELEASED_DATE | date", "return chars that are not linked to any of the given traits. |", "supported for `None`) | | RELEASED | [None][] | *SOME* | Filter using", "| Field Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|------------------------------------------| | VN", "storing the comman condition attributes. Tip: `ALL` below means all operators (`==`, `!=`,", "iterable of objects which match the values from the API. | Field |", "| Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------| | NAME", "Filter using a `None` value for `RELEASED`. 
| | RELEASED_DATE | [datetime.date][] |", ") LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\")) __slots__ = () def _condition_selector( type: t.Type[T],", "# noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"vn\",", "NAME | [str] | *SOME + (%)* | Find using name of character.", "Description | |-----------|-----------------------------------|----------------------|------------------------------------------| | VN | [int][] | *ALL* | Find by visual", "| |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN | [int][] | *ALL* | Find releases linked to", "some symbols. Args: *symbols (str): The additional symbols of the operator. Returns: Operator:", "Find using name of producer. | | ORIGINAL | [None][] or [str][] |", "a search on the name, original and aliases fields. | | VN |", "| | RELEASED_DATE | [datetime.date][] | *ALL* | Filter using the release date", "*SOME* | Filter using the language, the VN is available in. | |", "= _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_some() ) RELEASED_DATE:", "= _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy(", "of producer. | | SEARCH | [str][] | *(%)* | Performs a search", "there is neither `ALL` nor `SOME` in the condition but an operator is", "array. | | TRAITS | [int][] | *SOME* | Find characters by trait.", "VNs by tag. 
| | TAGS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME*", "operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\", operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy(", "information. Info: This one only supports `ID` and `ID_ARRAY` filters of `BaseCondition`. \"\"\"", "to any of the given traits. | \"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy]", "t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG SEARCH: t.Final[_ConditionProxy] =", "| *SOME* | Filter using the Catalog number. | | LANGUAGES | [str][]", "of PLATFORMS. | | RELEASED | [None][] | *SOME* | Filter using a", "below means all operators (`==`, `!=`, `>`, `<`, `>=`, `<=`) are supported. `SOME`", "+ (%)* | Find using name of character. | | ORIGINAL | [None][]", "t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) __slots__ = () class CharacterCondition(BaseCondition): \"\"\" A class storing", "`<`, `>=`, `<=` symbols. \"\"\" return cls(\"=\", \"!=\", \">\", \">=\", \"<\", \"<=\", *symbols)", "`<=`) operators. `|BaseCondition.ID_ARRAY| SOME |` supports only (`==`, `!=`) operators. `|UserCondition.USERNAME| SOME +", "`BaseCondition` and doesn't have `ID` and `ID_ARRAY` filters. | Attribute | Field Value", "Hint: Check the `UlistLabelsCondition` class for more information. | Attribute | Field Value", "operator=Operator.fill_some(\"~\") ) SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator(\"=\")) VN_ARRAY:", "ID. 
| | VN_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find", "\"character\": CharacterCondition, \"staff\": StaffCondition, \"quote\": QuoteCondition, \"user\": UserCondition, \"ulist-labels\": UlistLabelsCondition, \"ulist\": UlistCondition, }", "*SOME + (%)* | Find using original/official name of the producer. Can't use", "*(==)* | Find staff by alias ID. | | AID_ARRAY | A [typing.Iterable][]", "name of the producer. Can't use `%` with `None`. | | TYPE |", "of [int][]s | *SOME* | Find VNs using an array of tags. \"\"\"", "| The `=` filter will return chars that are linked to any (not", "TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy( \"language\", operator=Operator.fill_some()", "with `None`. | | SEARCH | [str][] | *(%)* | Performs a search", "attributes `User` type supports as condition. Hint: Check the `BaseCondition` class for more", "`|BaseCondition.ID_ARRAY| SOME |` supports only (`==`, `!=`) operators. `|UserCondition.USERNAME| SOME + (%)|` supports", "VN or None to match all the vn not starting with an alphabet.", "the type of release. | | GTIN | [int][] | *SOME* | Filter", "t.TYPE_CHECKING: from ..interface import T __all__ = ( \"VNCondition\", \"BaseCondition\", \"ReleaseCondition\", \"ProducerCondition\", \"CharacterCondition\",", "|-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID | [int][] | *(==)* | Find staff by alias ID.", "| Field Value Type | Operations Supported | Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID", "() class ProducerCondition(BaseCondition): \"\"\" A class storing all the attributes `Producer` type supports", "[typing.Iterable][] of [str][]s | *SOME* | Filter using an array of the original", "type of producer. 
| | LANGUAGE | [str][] | *SOME* | Filter using", "RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_all()", "E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] = VN PRODUCER: t.Final[_ConditionProxy] =", "conditioned against an Iterable of values and these fields yield an iterable of", "| TAGS_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* | Find VNs using", "*ALL* | Filter using the release date of the VN. | | LANGUAGES", "of the given traits, the `!=` filter will return chars that are not", "\"\"\" A class storing all the attributes `Producer` type supports as condition. Hint:", "Type | Operations Supported | Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME | [str] |", "releases linked to the given visual novel IDs in the array. | |", "If there is neither `ALL` nor `SOME` in the condition but an operator", "__slots__ = () def _condition_selector( type: t.Type[T], ): condition_map = { \"vn\": VNCondition,", "LANGUAGE: t.Final[_ConditionProxy] = _ConditionProxy( \"language\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGE SEARCH: t.Final[_ConditionProxy]", "using the release date of the VN. | | PATCH | [bool][] |", "| Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID | [int][] | *(==)* | Find using", "| [None][] | *SOME* | Filter using a `None` value for `RELEASED`. 
|", "t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() )", "attributes `Release` type supports as condition. Hint: Check the `BaseCondition` class for more", "VNCondition(BaseCondition): \"\"\" A class storing all the attributes `VN` type supports as condition.", "values from the API. | Field | Field Value Type | Operations Supported", "| A [typing.Iterable][] of [str][]s | *(==)* | Find user using an array", "| USERNAME_ARRAY | A [typing.Iterable][] of [str][]s | *(==)* | Find user using", "| Find the release using the title. | | ORIGINAL | [None][] or", "operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS __slots__ = () class StaffCondition(BaseCondition): \"\"\" A", "*SOME* | Filter using the type of release. | | GTIN | [int][]", ") ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG SEARCH:", "`UlistLabelsCondition` class for more information. | Attribute | Field Value Type | Operations", "| Operations Supported | Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID | [int][] | *(==)*", "*SOME + (%)* | Find using name of character. | | ORIGINAL |", "Filter using the original language of the VN. | | ORIG_LANG_ARRAY | A", "t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator(\"=\") ) __slots__ = () class UlistLabelsCondition: \"\"\" A", "*SOME* | Filter using the first character of the VN or None to", "| Filter using a `None` value for `RELEASED`. | | RELEASED_DATE | [datetime.date][]", "visual novel ID array. | | TRAITS | [int][] | *SOME* | Find", "and doesn't have `ID` and `ID_ARRAY` filters. | Attribute | Field Value Type", "base class storing the comman condition attributes. 
Tip: `ALL` below means all operators", "A class storing all the attributes `Producer` type supports as condition. Hint: Check", "storing all the attributes `User` type supports as condition. Hint: Check the `BaseCondition`", "the release date of the VN. | | PATCH | [bool][] | *(==)*", "all the attributes `VN` type supports as condition. Hint: Check the `BaseCondition` class", "class BaseCondition: \"\"\" A base class storing the comman condition attributes. Tip: `ALL`", "[typing.Iterable][] of [int][]s | SOME | Filter using an array of `ID`s.| \"\"\"", "then that means only that operator is supported. I hope you understand the", "ID_ARRAY | A [typing.Iterable][] of [int][]s | SOME | Filter using an array", ") LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES PLATFORMS: t.Final[_ConditionProxy] = _ConditionProxy( \"platforms\", operator=Operator.fill_some() ) PLATFORMS_ARRAY:", "Find staff by an array of alias IDs. | | SEARCH | [str][]", "_ConditionProxy(\"uid\", operator=Operator(\"=\")) class UlistCondition(UlistLabelsCondition): \"\"\" A class storing all the attributes `Ulist` type", "original and aliases fields. | | VN | [int][] | *(==)* | Find", "| [datetime.date][] | *ALL* | Filter using the release date of the VN.", "Value Type | Operations Supported | Description | |--------------|-----------------------------------|----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | NAME | [str]", "using the language, the VN is available in. 
| | LANGUAGES_ARRAY | A", "| Field Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID", "_ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\", operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\",", "`ID` | | ID_ARRAY | A [typing.Iterable][] of [int][]s | SOME | Filter", "| [str][] | *SOME* | Filter using the Catalog number. | | LANGUAGES", "condition support. Warning: This object is not meant to be created by users.", "| *SOME* | Find characters by trait. | | TRAITS_ARRAY | A [typing.Iterable][]", "\"\"\" # noqa: E501 NAME: t.Final[_ConditionProxy] = _ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy]", "t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_some() )", "| [None][] or [str][] | *SOME + (%)* | Find the release using", "an array of PLATFORMS. | \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\",", "| *SOME* | Filter using an array of the original languages of the", "} cls = condition_map[ type.__name__.lower() if type != UlistLabels else \"ulist-labels\" ] return", "[str][] | *SOME* | Filter using an array of PLATFORMS. | | PLATFORMS_ARRAY", "FIRST_CHAR | [None][] or [str][] | *SOME* | Filter using the first character", "supports as condition. Hint: Check the `BaseCondition` class for more information. Info: This", "A factory method for creating an Operator object with all symbols. 
Args: *symbols", "typing as t from .proxy import _ConditionProxy from ..objects import UlistLabels if t.TYPE_CHECKING:", "_ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"vn\", operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\",", "with all symbols. Args: *symbols (str): The additional symbols of the operator. Returns:", "UlistCondition, } cls = condition_map[ type.__name__.lower() if type != UlistLabels else \"ulist-labels\" ]", "t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES PLATFORMS: t.Final[_ConditionProxy] =", "| SEARCH | [str][] | *(%)* | Search for the VN using it's", "a `None` value for `RELEASED`. | | RELEASED_DATE | date | *ALL* |", "I hope you understand the above. :) Tip: `Field Value Type` means the", "using the language, the release is available in. | | LANGUAGES_ARRAY | A", "= _ConditionProxy( \"date\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_all() ) PATCH:", ") LANGUAGES: t.Final[_ConditionProxy] = _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR:", "Attributes: symbols (t.Tuple[str]): The symbols of the operator. \"\"\" self.symbols = symbols @classmethod", "[str][] | *SOME + (%)* | Find user by their username. | |", "| Description | |-----------------|-----------------------------------|----------------------|----------------------------------------------------------------------------------------------| | VN | [int][] | *ALL* | Find releases", "array of PLATFORMS. 
| | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s | *SOME*", "t.Final[_ConditionProxy] = PLATFORMS __slots__ = () class ProducerCondition(BaseCondition): \"\"\" A class storing all", "Supported | Description | |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME | [str][] | *SOME + (%)*", "Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID | [int][]", "Attribute | Field Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|------------------------------------------| |", "ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"id\", operator=Operator.fill_some() ) __slots__ = () class VNCondition(BaseCondition): \"\"\"", "the release using the title. | | ORIGINAL | [None][] or [str][] |", "PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using an array", "LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* | Filter using the array", "TITLE | [str][] | *SOME + (%)* | Find the release using the", "using an array of tags. \"\"\" # noqa: E501 TITLE: t.Final[_ConditionProxy] = _ConditionProxy(", "A class storing all the attributes `VN` type supports as condition. Hint: Check", "| Performs a search on the name, original and aliases fields. | \"\"\"", "Find characters linked to the given visual novel ID array. | | TRAITS", "*SOME* | Find VNs by tag. | | TAGS_ARRAY | A [typing.Iterable][] of", "given traits, the `!=` filter will return chars that are not linked to", "for more information. | Attribute | Field Value Type | Operations Supported |", "using the array of languages, the VN is available in. | | FIRST_CHAR", "def __init__(self, *symbols: str) -> None: \"\"\" Operator constructor. 
Args: *symbols (str): The", "A class storing all the attributes `Release` type supports as condition. Hint: Check", "| Check if the release is a freeware. | | DOUJIN | [bool][]", "factory method for creating an Operator object with some symbols. Args: *symbols (str):", "Check if the release is a freeware. | | DOUJIN | [bool][] |", "attributes `Character` type supports as condition. Hint: Check the `BaseCondition` class for more", "LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy( \"firstchar\", operator=Operator.fill_some() ) ORIG_LANG: t.Final[_ConditionProxy] = _ConditionProxy( \"orig_lang\",", "_ConditionProxy( \"id\", operator=Operator.fill_some() ) __slots__ = () class VNCondition(BaseCondition): \"\"\" A class storing", "| Find releases linked to the given visual novel ID. | | VN_ARRAY", "| *SOME* | Filter using an array of PLATFORMS. | | RELEASED |", "with some symbols. Args: *symbols (str): The additional symbols of the operator. Returns:", "# noqa: E501 ID: t.Final[_ConditionProxy] = _ConditionProxy(\"id\", operator=Operator.fill_all()) ID_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"id\",", "| Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE | [str][] | *SOME + (%)* |", "| |----------|-----------------------------------|----------------------|--------------------------------| | ID | [int][] | ALL | Filter using an `ID`", "use `%` with `None`. | | SEARCH | [str][] | *(%)* | Performs", ") RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_all() ) PATCH: t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\"))", "visual novel IDs. 
| | LABEL | [int][] | *(==)* | Label assigned", "\"gtin\", operator=Operator.fill_some() ) CATALOG: t.Final[_ConditionProxy] = _ConditionProxy( \"catalog\", operator=Operator.fill_some() ) LANGUAGES: t.Final[_ConditionProxy] =", "= _ConditionProxy( \"languages\", operator=Operator.fill_some() ) LANGUAGES_ARRAY: t.Final[_ConditionProxy] = LANGUAGES FIRSTCHAR: t.Final[_ConditionProxy] = _ConditionProxy(", "| |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME | [str][] | *SOME + (%)* | Find user", "*(==)* | Find user using an array of usernames. | \"\"\" # noqa:", "= _ConditionProxy(\"vn\", operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] = VN PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy( \"producer\", operator=Operator(\"=\")", "class for more information. | Attribute | Field Value Type | Operations Supported", "_ConditionProxy( \"producer\", operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy] = _ConditionProxy( \"title\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy]", "not starting with an alphabet. | | ORIG_LANG | [str][] | *SOME* |", "| FREEWARE | [bool][] | *(==)* | Check if the release is a", "[int][]s | *SOME* | Find VNs using an array of tags. \"\"\" #", "release date of the VN. | | LANGUAGES | [None][] or [str][] |", "in the array. | | PRODUCER | [int][] | *(==)* | Find releases", "available in. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s | *SOME* |", "| | USERNAME_ARRAY | A [typing.Iterable][] of [str][]s | *(==)* | Find user", "This class doesn't inherit from `BaseCondition` and doesn't have `ID` and `ID_ARRAY` filters.", "| Find using user ID. The special value '0' is recognized as the", "| *(==)* | Find characters linked to the given visual novel ID array.", "| PLATFORMS | [str][] | *SOME* | Filter using an array of PLATFORMS.", "IDs. 
| | SEARCH | [str][] | *(%)* | Performs a search on", "| AID_ARRAY | A [typing.Iterable][] of [int][]s | *(==)* | Find staff by", "all symbols. Args: *symbols (str): The additional symbols of the operator. Returns: Operator:", "Filter using type of producer. | | LANGUAGE | [str][] | *SOME* |", "() class ReleaseCondition(BaseCondition): \"\"\" A class storing all the attributes `Release` type supports", "noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"vn\", operator=Operator.fill_some()", "Field Value Type | Operations Supported | Description | |-----------|-----------------------------------|----------------------|------------------------------------------| | VN |", "\"\"\" A class storing all the attributes `UlistLabels` type supports as condition. Info:", "created Operator object. Info: This method fills the `=` and `!=` symbols. \"\"\"", "import UlistLabels if t.TYPE_CHECKING: from ..interface import T __all__ = ( \"VNCondition\", \"BaseCondition\",", "using the original/official title. (`%` operation not supported for `None`) | | RELEASED", "`ID_ARRAY` filters. | Attribute | Field Value Type | Operations Supported | Description", "operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"vn\", operator=Operator.fill_some() ) LABEL: t.Final[_ConditionProxy] = _ConditionProxy(\"label\", operator=Operator(\"~\"))", "using it's title and releases. | | TAGS | [int][] | *SOME* |", "PLATFORMS RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"released\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"released\",", "E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_all()) VN_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"vn\", operator=Operator.fill_some() )", "on the name, original and aliases fields. 
| \"\"\" # noqa: E501 AID:", "| *(==)* | Find staff by an array of alias IDs. | |", "| Description | |-----------|-----------------------------------|----------------------|-------------------------------------------------------------| | AID | [int][] | *(==)* | Find staff", "Field Value Type | Operations Supported | Description | |-----------|------------------|----------------------|------------------------------------------------------------------------------------------| | UID |", "class UlistLabelsCondition: \"\"\" A class storing all the attributes `UlistLabels` type supports as", "[str][] | *SOME* | Filter using the PLATFORMS field. | | PLATFORMS_ARRAY |", "an iterable of objects which match the values from the API. | Field", "Value Type | Operations Supported | Description | |----------------|-----------------------------------|----------------------|----------------------------------------| | USERNAME | [str][]", "against which the field should be conditioned. Tip: All `X_ARRAY` fields must be", "| Filter using the type of release. | | GTIN | [int][] |", "| [str][] | *SOME* | Filter using the type of release. | |", "Operator: \"\"\" A factory method for creating an Operator object with all symbols.", "array of PLATFORMS. | \"\"\" # noqa: E501 VN: t.Final[_ConditionProxy] = _ConditionProxy(\"vn\", operator=Operator.fill_some())", "novel ID. | | VN_ARRAY | A [typing.Iterable][] of [int][]s | *SOME* |", "`SOME` in the condition but an operator is specified, then that means only", "An object for storing operators for XCondition attributes to check condition support. Warning:", "TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS __slots__ = () class ReleaseCondition(BaseCondition): \"\"\" A class storing", "attributes `UlistLabels` type supports as condition. Info: This class doesn't inherit from `BaseCondition`", "attributes. Tip: `ALL` below means all operators (`==`, `!=`, `>`, `<`, `>=`, `<=`)", "method for creating an Operator object with all symbols. 
Args: *symbols (str): The", "'0' is recognized as the currently logged in user. | \"\"\" # noqa:", "the release is a freeware. | | DOUJIN | [bool][] | *(==)* |", "supported. `SOME` means only operators (`==`, `!=`) are supported. `SOME + X` means", "operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] = TAGS __slots__ = ()", "release is available in. | | LANGUAGES_ARRAY | A [typing.Iterable][] of [str][]s |", "all) of the given traits, the `!=` filter will return chars that are", ") RELEASED: t.Final[_ConditionProxy] = _ConditionProxy( \"date\", operator=Operator.fill_some() ) RELEASED_DATE: t.Final[_ConditionProxy] = _ConditionProxy( \"date\",", "name of the character. Can't use `%` with `None`. | | SEARCH |", "DOUJIN: t.Final[_ConditionProxy] = _ConditionProxy(\"doujin\", operator=Operator(\"=\")) TYPE: t.Final[_ConditionProxy] = _ConditionProxy( \"type\", operator=Operator.fill_some() ) GTIN:", "_ConditionProxy( \"name\", operator=Operator.fill_some(\"~\") ) ORIGINAL: t.Final[_ConditionProxy] = _ConditionProxy( \"original\", operator=Operator.fill_some(\"~\") ) TYPE: t.Final[_ConditionProxy]", "the VN. | | LANGUAGES | [None][] or [str][] | *SOME* | Filter", "the releases linked to the given visual novel IDs in the array. |", "| Find VNs by tag. | | TAGS_ARRAY | A [typing.Iterable][] of [int][]s", "| | TAGS | [int][] | *SOME* | Find VNs by tag. |", "| Find releases linked to the given producer ID. | | TITLE |", "`!=` symbols. \"\"\" return cls(\"=\", \"!=\", *symbols) @classmethod def fill_all(cls, *symbols: str) ->", "| Filter using the language, the VN is available in. | | LANGUAGES_ARRAY", "release is available in. | | PLATFORMS | [str][] | *SOME* | Filter", "only operators (`==`, `!=`) are supported. `SOME + X` means `SOME` and `X`", "Operator: \"\"\" A factory method for creating an Operator object with some symbols.", "class for more information. 
Info: This one only supports `ID` and `ID_ARRAY` filters", "\"orig_lang\", operator=Operator.fill_some() ) ORIG_LANG_ARRAY: t.Final[_ConditionProxy] = ORIG_LANG SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS:", "`RELEASED`. | | RELEASED_DATE | date | *ALL* | Filter using the release", "= VN PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy( \"producer\", operator=Operator(\"=\") ) TITLE: t.Final[_ConditionProxy] = _ConditionProxy(", "Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------| | TITLE | [str][] |", "__slots__ = () class StaffCondition(BaseCondition): \"\"\" A class storing all the attributes `Staff`", "all the vn not starting with an alphabet. | | ORIG_LANG | [str][]", "*SOME* | Filter using an array of PLATFORMS. | | RELEASED | [None][]", "to match all the vn not starting with an alphabet. | | ORIG_LANG", "__slots__ = () class VNCondition(BaseCondition): \"\"\" A class storing all the attributes `VN`", "operator=Operator.fill_some()) VN_ARRAY: t.Final[_ConditionProxy] = VN PRODUCER: t.Final[_ConditionProxy] = _ConditionProxy( \"producer\", operator=Operator(\"=\") ) TITLE:", "| PLATFORMS | [None][] or [str][] | *SOME* | Filter using the PLATFORMS", "| Attribute | Field Value Type | Operations Supported | Description | |-----------------|-----------------------------------|----------------------|-------------------------------------------------------------------------------------------------------|", "an array of PLATFORMS. 
| | PLATFORMS_ARRAY | A [typing.Iterable][] of [str][]s |", "`!=` filter will return chars that are not linked to any of the", ") PLATFORMS_ARRAY: t.Final[_ConditionProxy] = PLATFORMS __slots__ = () class ProducerCondition(BaseCondition): \"\"\" A class", "| RELEASED_DATE | [datetime.date][] | *ALL* | Filter using the release date of", "| | PRODUCER | [int][] | *(==)* | Find releases linked to the", "the release date of the VN. | | LANGUAGES | [None][] or [str][]", "t.Final[_ConditionProxy] = _ConditionProxy(\"patch\", operator=Operator(\"=\")) FREEWARE: t.Final[_ConditionProxy] = _ConditionProxy( \"freeware\", operator=Operator(\"=\") ) DOUJIN: t.Final[_ConditionProxy]", "__slots__ = () class ProducerCondition(BaseCondition): \"\"\" A class storing all the attributes `Producer`", "| | TRAITS | [int][] | *SOME* | Find characters by trait. |", "| [str] | *SOME + (%)* | Find using name of character. |", "[str][]s | *SOME* | Filter using an array of languages of producer. |", "chars that are linked to any (not all) of the given traits, the", "USERNAME: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator.fill_some(\"~\") ) USERNAME_ARRAY: t.Final[_ConditionProxy] = _ConditionProxy( \"username\", operator=Operator(\"=\")", "VN TRAITS: t.Final[_ConditionProxy] = _ConditionProxy( \"traits\", operator=Operator.fill_some() ) TRAITS_ARRAY: t.Final[_ConditionProxy] = TRAITS __slots__", "\"\"\" An object for storing operators for XCondition attributes to check condition support.", "-> None: \"\"\" Operator constructor. Args: *symbols (str): The symbols of the operator.", "`X` operators are supported. For example: `|BaseCondition.ID| ALL |` supports (`==`, `!=`, `>`,", "SEARCH: t.Final[_ConditionProxy] = _ConditionProxy(\"search\", operator=Operator(\"~\")) TAGS: t.Final[_ConditionProxy] = _ConditionProxy(\"tags\", Operator.fill_some()) TAGS_ARRAY: t.Final[_ConditionProxy] =", "means the type of value against which the field should be conditioned. 
Tip:", "| | LANGUAGES | [None][] or [str][] | *SOME* | Filter using the", "`None` value for `RELEASED`. | | RELEASED_DATE | [datetime.date][] | *ALL* | Filter", "Filter using an array of `ID`s.| \"\"\" # noqa: E501 ID: t.Final[_ConditionProxy] =", "one only supports `ID` and `ID_ARRAY` filters of `BaseCondition`. \"\"\" # noqa: E501", "available in. | | FIRST_CHAR | [None][] or [str][] | *SOME* | Filter", "vn not starting with an alphabet. | | ORIG_LANG | [str][] | *SOME*", "| [int][] | *ALL* | Find by visual novel ID. | | VN_ARRAY", "(`%` operation not supported for `None`) | | RELEASED | [None][] | *SOME*" ]
[ "block of encoded numbers | **Pre:** | len(plain) == 256 | **Post:** |", "encoded number | **Pre:** | plain >= 0 | plain < 256 |", "decoded[i] = decoded[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI return decoded def encode(self, plain: bytearray)", "decoded = self.pBox.decode(encoded, pSeed) for i in range(256): seedAtI = self.seed[i] for invertedJ", "to improve performance decoded[i] = decoded[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI return decoded def", "== 256 | seed[i] >= 1 | **Modifies:** | self.seed[i] \"\"\" for i", "if (emptyCounter < targetEmpty): index = (index+1)%(256*8) self.encodeMap[index] = i for i in", "None def test_simple(self): plain = bytearray() for i in range(256): plain.append(randint(0, 255)) for", "= (index+1)%256 self.encodeMap[index] = i for i in range(256): self.decodeMap[self.encodeMap[i]] = i def", "for i in range(256): pSeed = (pSeed+self.seed[i])%256 encoded = self.encodeRound(plain, 0, pSeed) for", "decode data Parameters: pw: password | **Pre:** | len(pw) == 256 | **Post:**", "invertedI in range(7): i = 6-invertedI decoded = self.decodeRound(decoded, i, pSeed) for i", "if (index < 0): index += 2048 index8 = int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8))", "bytearray(256) for i in range(256): seed[i] = self.seed[i] return seed def setSeed(self, seed:", "i in range(4096): self.pw.append(randint(0, 255)) self.spBox = SPBox(self.pw) def tearDown(self): self.pw = None", "> 0: self.buffer = encoded return returnvalue def close(self): return bytearray() class SBox:", "+= 1 if (emptyCounter < targetEmpty): index = (index+1)%(256*8) self.encodeMap[index] = i for", "= None def test_simple(self): plain = bytearray() for i in range(256): plain.append(randint(0, 255))", "**Pre:** | len(pw) == 4096 | len(seed) == 256 | seed[i] >= 1", "+= 1 self.spBox = SPBox(password) self.buffer = None self.seeded = False def decode(self,", "for i in range(256): seedAtI = self.seed[i] encoded[i] = plain[i] ^ 
self.sBoxes[round].encodeMap[i] ^", "\"\"\" pSeed = 0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 encoded =", "self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray()", "Parameters: encoded: encoded number Returns: decoded number | **Pre:** | encoded >= 0", "= self.spBox.getSeed() self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded) decodedMatches = 0 seedMatches = 0 for", "| **Modifies:** | self.seed[i] \"\"\" for i in range(256): self.seed[i] = seed[i] #", "List class Encoder: def __init__(self, pw: str): password = bytearray() for c in", "from typing import Dict, Tuple, List class Encoder: def __init__(self, pw: str): password", "in range(256): ba.append(encoded.pop(0)) if (self.seeded): decoded = self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded =", "replacement for SBox.encode() to improve performance encoded = self.pBox.encode(encoded, pSeed) return encoded def", "i in range(256*8): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: bytearray, seed: int) ->", "None self.spBox = None def test_simple(self): plain = bytearray() for i in range(256):", "-> bytearray: \"\"\" Encodes a block of plain numbers. 
Parameters: plain: block of", "< 256 | **Post:** | len(return) == 256 | return[i] >= 0 |", "0 maxEmpty = 256*8-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index]", "= self.buffer+plain self.buffer = None if (not self.seeded): ba = self.spBox.getSeed() returnvalue.extend(ba) self.seeded", "256 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 256 \"\"\" def __init__(self, pw:", "Returns: decoded number | **Pre:** | encoded >= 0 | encoded < 256", "ba = bytearray() for i in range(256): ba.append(plain.pop(0)) encoded = self.spBox.encode(ba) returnvalue.extend(encoded) if", ">= 0 | self.decodeMap[i] < 2048 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int]", "encodedMatches += 1 if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10)", "len(return) == 256 | return[i] >= 1 \"\"\" seed = bytearray(256) for i", "256-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index] == -1): emptyCounter", "-1): emptyCounter += 1 if (emptyCounter < targetEmpty): index = (index+1)%256 self.encodeMap[index] =", "0, pSeed) for i in range(7): encoded = self.encodeRound(encoded, i+1, pSeed) for i", "< 256: self.buffer.append(randint(0, 255)) return self.encode(bytearray()) class Decoder: def __init__(self, pw: str): password", "== 256 | **Modifies:** | self.seed[i] \"\"\" pSeed = 0 for i in", "len(plain) seed = self.spBox.getSeed() for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) encoded =", "\"\"\" for i in range(256): self.seed[i] = seed[i] # TODO change general parameter", "| len(return) == 256 \"\"\" encoded = bytearray(256) for i in range(256): seedAtI", "256 | **Post:** | len(return) == 256 \"\"\" encoded = bytearray(256) for i", "seed Returns: block of decoded numbers | **Pre:** | len(encoded) == 256 |", "-1): emptyCounter += 1 if (emptyCounter < targetEmpty): index = (index+1)%(256*8) self.encodeMap[index] =", "< 4096: 
password.append(ord(pw[index%len(pw)])) index += 1 self.spBox = SPBox(password) self.buffer = None self.seeded", "1 self.assertTrue(decodedMatches == length) # TODO encodeMatches self.assertTrue(seedMatches < 256/10) # TODO encode", "decoded[i]] # replacement for SBox.decode() to improve performance decoded[i] = decoded[i] ^ self.sBoxes[round].encodeMap[i]", "policy: all parameters may be edited by functions, no deepcopy needed #TODO change", "| self.encodeMap[i] >= 0 | self.encodeMap[i] < 256 | len(self.decodeMap) == 256 |", "pSeed: int) -> bytearray: \"\"\" Encodes a block of plain numbers. Parameters: plain:", "pw: bytearray): self.encodeMap: List[int] = [-1]*256 self.decodeMap: List[int] = [-1]*256 index = 0", "!= 0) seed2 = self.spBox.getSeed() self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded) decodedMatches = 0 seedMatches", ">= 0 | encoded < 256 | **Post:** | return >= 0 |", "SPBox(password) self.buffer = None self.seeded = False def decode(self, encoded: bytearray): returnvalue =", "len(pw) == 4096 | len(seed) == 256 | seed[i] >= 1 | **Post:**", "= self.spBox.encode(plain) for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) seed2 = self.spBox.getSeed() self.spBox.setSeed(seed)", "if self.buffer is not None: encoded = self.buffer+encoded self.buffer = None while len(encoded)", "= self.sBoxes[j].encodeMap[ encoded[i]] # replacement for SBox.encode() to improve performance encoded = self.pBox.encode(encoded,", "None): seed = bytearray(256) for i in range(256): seed[i] = randint(1, 255) self.seed:", "0): decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]] # replacement for SBox.decode() to improve performance decoded[i]", "used to encode data decodeMap: lookuptable used to decode data Parameters: pw: password", "in range(256): self.assertTrue(self.spBox.seed[i] != 0) encoded = self.spBox.encode(plain) for i in range(256): self.assertTrue(self.spBox.seed[i]", "emptyCounter += 1 if (emptyCounter < targetEmpty): index = (index+1)%256 
self.encodeMap[index] = i", "range(256): seedAtI = self.seed[i] encoded[i] = plain[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI for j", "ba = self.spBox.getSeed() returnvalue.extend(ba) self.seeded = True while len(plain) >= 256: ba =", "0 | self.encodeMap[i] < 256 | len(self.decodeMap) == 256 | self.decodeMap[i] >= 0", "bytearray(256) for i in range(256): indexVar = i*8 for b in range(8): if", "plain numbers round: iteration of encode pSeed: seed for PBox Returns: block of", "index8 = int(index/8) encoded[index8] = encoded[index8]+(1<<(index%8)) return encoded def decode(self, encoded: bytearray, seed:", "block of encoded numbers round: iteration of decode pSeed: seed for PBox Returns:", "self.pBox.decode(encoded, seed) decodedMatches = 0 encodedMatches = 0 for i in range(256): if", "0 | self.decodeMap[i] < 2048 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] =", "a transposition cipher. Attributes: encodeMap: lookuptable used to encode data decodeMap: lookuptable used", "self.seeded = True if len(encoded) > 0: self.buffer = encoded return returnvalue def", "decoded numbers | **Pre:** | len(encoded) == 256 | seed >= 0 |", "| **Post:** | len(return) == 256 | return[i] >= 0 | return[i] <", "| len(seed) == 256 | seed[i] >= 1 | **Post:** | len(self.sBoxes) ==", "= SPBox(password) self.buffer = None self.seeded = False def decode(self, encoded: bytearray): returnvalue", "self.seeded): ba = self.spBox.getSeed() returnvalue.extend(ba) self.seeded = True while len(plain) >= 256: ba", "in range(8): if ((encoded[i]) & (1<<b)): index = self.decodeMap[indexVar+b]-seed if (index < 0):", "| seed[i] >= 1 | **Modifies:** | self.seed[i] \"\"\" for i in range(256):", ">= 1 | **Modifies:** | self.seed[i] \"\"\" for i in range(256): self.seed[i] =", "targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index] == -1): emptyCounter +=", "block of plain numbers seed: seed Returns: block of encoded numbers | **Pre:**", "seed2[i]): 
seedMatches += 1 for i in range(length): if (plain[i] == decoded[i]): decodedMatches", "return >= 0 | return < 256 \"\"\" return self.encodeMap[plain] def decode(self, encoded:", "256 | len(self.decodeMap) == 256 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 256", "\"\"\" encoded = bytearray(256) for i in range(256): indexVar = i*8+seed for b", "| len(encoded) == 256 | round >= 0 | round < 8 |", "self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray()", "encoded[i]): encodedMatches += 1 if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(encodedMatches <", "Attributes: sBoxes: list of SBoxes used for substitution seed: seed pBox: PBox used", "| len(plain) == 256 | **Post:** | len(return) == 256 | **Modifies:** |", "used for substitution seed: seed pBox: PBox used for permutation Parameters: pw: password", "bytearray): returnvalue = bytearray() if self.buffer is not None: encoded = self.buffer+encoded self.buffer", "of encoded numbers. 
Parameters: encoded: block of encoded numbers seed: seed Returns: block", "decoded numbers | **Pre:** | len(encoded) == 256 | encoded[i] >= 0 |", "in range(8): if ((seedAtI & (1<<j)) != 0): encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]] #", "seed = bytearray(256) for i in range(256): seed[i] = self.seed[i] return seed def", "self.buffer = None while len(encoded) >= 256: ba = bytearray() for i in", "self.decodeMap[i] < 256 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*256 self.decodeMap:", "= self.seed[i] for invertedJ in range(8): j = 8-1-invertedJ if ((seedAtI & (1<<j))", "self.buffer is not None: encoded = self.buffer+encoded self.buffer = None while len(encoded) >=", "number Returns: encoded number | **Pre:** | plain >= 0 | plain <", "255)) self.pBox = PBox(self.pw) def tearDown(self): self.pw = None self.pBox = None def", "(plain[i] == encoded[i]): encodedMatches += 1 if (plain[i] == decoded[i]): decodedMatches += 1", "= None self.seeded = False def decode(self, encoded: bytearray): returnvalue = bytearray() if", "returnvalue def close(self): while len(self.buffer) < 256: self.buffer.append(randint(0, 255)) return self.encode(bytearray()) class Decoder:", "i in range(256): self.seed[i] = decoded[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i]", "self.spBox.decode(encoded) decodedMatches = 0 seedMatches = 0 for i in range(256): if (seed[i]", "= 0 encodedMatches = 0 for i in range(256): plain = i encoded", "+= 1 self.assertTrue(decodedMatches == length) # TODO encodeMatches self.assertTrue(seedMatches < 256/10) # TODO", "bytearray(256) for i in range(256): spw[i] = pw[s*256+i] self.sBoxes[s] = SBox(spw) ppw =", "is a transposition cipher. 
Attributes: encodeMap: lookuptable used to encode data decodeMap: lookuptable", "self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return decoded def getSeed(self) ->", "256 | **Post:** | len(return) == 256 | return[i] >= 0 | return[i]", "encoded): encodedMatches += 1 if (plain == decoded): decodedMatches += 1 self.assertTrue(encodedMatches <", "if (plain == decoded): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256)", ">= 0 | return < 256 \"\"\" return self.encodeMap[plain] def decode(self, encoded: int)", "| return[i] >= 0 | return[i] < 256 \"\"\" decoded = bytearray(256) for", "< 256 | **Modifies:** | self.seed[i] \"\"\" pSeed = 0 for i in", "improve performance decoded[i] = decoded[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI return decoded def encode(self,", "== 256 | return[i] >= 0 | return[i] < 256 | **Modifies:** |", "| len(return) == 256 | return[i] >= 0 | return[i] < 256 \"\"\"", "seed numbers | **Post:** | len(return) == 256 | return[i] >= 1 \"\"\"", "return < 256 \"\"\" return self.encodeMap[plain] def decode(self, encoded: int) -> int: \"\"\"", "targetEmpty): index = (index+1)%(256*8) self.encodeMap[index] = i for i in range(256*8): self.decodeMap[self.encodeMap[i]] =", "== -1): emptyCounter += 1 if (emptyCounter < targetEmpty): index = (index+1)%256 self.encodeMap[index]", "numbers seed: seed Returns: block of decoded numbers | **Pre:** | len(encoded) ==", "in range(8): spw = bytearray(256) for i in range(256): spw[i] = pw[s*256+i] self.sBoxes[s]", "range(256): pSeed = (pSeed+self.seed[i])%256 encoded = self.encodeRound(plain, 0, pSeed) for i in range(7):", "len(return) == 256 | **Modifies:** | self.seed[i] \"\"\" pSeed = 0 for i", "class SBox: \"\"\" SBox is a substitution cipher. Attributes: encodeMap: lookuptable used to", "| **Pre:** | len(plain) == 256 | seed >= 0 | seed <", "encoded: int) -> int: \"\"\" Decodes a single encoded number. 
Parameters: encoded: encoded", "**Pre:** | len(pw) == 2048 | **Post:** | len(self.encodeMap) == 2048 | self.encodeMap[i]", "range(256): seedAtI = self.seed[i] for invertedJ in range(8): j = 8-1-invertedJ if ((seedAtI", ">= 256: ba = bytearray() for i in range(256): ba.append(plain.pop(0)) encoded = self.spBox.encode(ba)", "= 1 return decoded def getSeed(self) -> bytearray: \"\"\" Gets the seed. Returns:", "data Parameters: pw: password | **Pre:** | len(pw) == 256 | **Post:** |", "for i in range(256): if (seed[i] == seed2[i]): seedMatches += 1 for i", "256) class SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(4096): self.pw.append(randint(0,", "(index+1)%256 self.encodeMap[index] = i for i in range(256): self.decodeMap[self.encodeMap[i]] = i def encode(self,", "encoded[i]] # replacement for SBox.encode() to improve performance encoded = self.pBox.encode(encoded, pSeed) return", "len(return) == 256 \"\"\" decoded = self.pBox.decode(encoded, pSeed) for i in range(256): seedAtI", "__init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*256 self.decodeMap: List[int] = [-1]*256 index =", "PBox used for permutation Parameters: pw: password seed: seed | **Pre:** | len(pw)", "invertedJ in range(8): j = 8-1-invertedJ if ((seedAtI & (1<<j)) != 0): decoded[i]", "encode(self, plain: bytearray) -> bytearray: \"\"\" Encodes a block of plain numbers. 
Parameters:", "!= 0): decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]] # replacement for SBox.decode() to improve performance", "self.encodeMap[(b+indexVar)%2048] index8 = int(index/8) encoded[index8] = encoded[index8]+(1<<(index%8)) return encoded def decode(self, encoded: bytearray,", "decodedMatches = 0 encodedMatches = 0 for i in range(256): if (plain[i] ==", "0 | encoded < 256 | **Post:** | return >= 0 | return", "def decodeRound(self, encoded: bytearray, round: int, pSeed: int) -> bytearray: \"\"\" Decodes a", "of seed numbers | **Pre:** | len(seed) == 256 | seed[i] >= 1", "self.decodeMap[indexVar+b]-seed if (index < 0): index += 2048 index8 = int(index/8) decoded[index8] =", "len(encoded) == 256 | seed >= 0 | seed < 256 | **Post:**", "self.assertTrue(decodedMatches == 256) class PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in", "self.sBoxes[j].encodeMap[ encoded[i]] # replacement for SBox.encode() to improve performance encoded = self.pBox.encode(encoded, pSeed)", "def __init__(self, pw: str): password = bytearray() for c in pw: password.append(ord(c)) index", "for i in range(256*8): emptyCounter = 0 maxEmpty = 256*8-i targetEmpty = 1+(pw[i]%maxEmpty)", "= False def decode(self, encoded: bytearray): returnvalue = bytearray() if self.buffer is not", "int) -> int: \"\"\" Decodes a single encoded number. 
Parameters: encoded: encoded number", "[None]*8 if (seed is None): seed = bytearray(256) for i in range(256): seed[i]", "self.buffer = None self.seeded = False def decode(self, encoded: bytearray): returnvalue = bytearray()", "# replacement for SBox.decode() to improve performance decoded[i] = decoded[i] ^ self.sBoxes[round].encodeMap[i] ^", "numbers seed: seed Returns: block of encoded numbers | **Pre:** | len(plain) ==", "range(256): self.seed[i] = plain[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1", "(index+1)%(256*8) self.encodeMap[index] = i for i in range(256*8): self.decodeMap[self.encodeMap[i]] = i def encode(self,", "class PBox: \"\"\" PBox is a transposition cipher. Attributes: encodeMap: lookuptable used to", "for SBox.decode() to improve performance decoded[i] = decoded[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI return", ">= 0 | return[i] < 256 \"\"\" decoded = bytearray(256) for i in", "len(seed) == 256 | seed[i] >= 1 | **Modifies:** | self.seed[i] \"\"\" for", "of plain numbers Returns: block of encoded numbers | **Pre:** | len(plain) ==", "255)) self.spBox = SPBox(self.pw) def tearDown(self): self.pw = None self.spBox = None def", "decodedMatches = 0 encodedMatches = 0 for i in range(256): plain = i", "len(encoded) == 256 | round >= 0 | round < 8 | pSeed", "self.spBox.encode(plain) for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) seed2 = self.spBox.getSeed() self.spBox.setSeed(seed) decoded", "(plain == decoded): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class", "def encode(self, plain: bytearray, seed: int) -> bytearray: \"\"\" Encodes a block of", "in range(256): if (plain[i] == encoded[i]): encodedMatches += 1 if (plain[i] == decoded[i]):", "== 256 | return[i] >= 0 | return[i] < 256 \"\"\" encoded =", "seedMatches = 0 for i in range(256): if (seed[i] == seed2[i]): seedMatches +=", "= None self.seeded = False def encode(self, plain: bytearray): returnvalue = 
bytearray() if", "Gets the seed. Returns: block of seed numbers | **Post:** | len(return) ==", "0): self.seed[i] = 1 return decoded def getSeed(self) -> bytearray: \"\"\" Gets the", "self.assertTrue(decodedMatches == 256) class SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in", "= i for i in range(256): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: int)", "sBoxes: list of SBoxes used for substitution seed: seed pBox: PBox used for", "decode(self, encoded: bytearray) -> bytearray: \"\"\" Decodes a block of encoded numbers. Parameters:", "in range(256): self.seed[i] = plain[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] =", "bytearray() if self.buffer is not None: encoded = self.buffer+encoded self.buffer = None while", "str): password = bytearray() for c in pw: password.append(ord(c)) index = 0 while", "data Parameters: pw: password | **Pre:** | len(pw) == 2048 | **Post:** |", "256 \"\"\" encoded = bytearray(256) for i in range(256): seedAtI = self.seed[i] encoded[i]", "while len(encoded) >= 256: ba = bytearray() for i in range(256): ba.append(encoded.pop(0)) if", "test_simple(self): plain = bytearray() for i in range(256): plain.append(randint(0, 255)) length = len(plain)", "7, pSeed) for invertedI in range(7): i = 6-invertedI decoded = self.decodeRound(decoded, i,", "bytearray() if self.buffer is not None: plain = self.buffer+plain self.buffer = None if", "1 return decoded def getSeed(self) -> bytearray: \"\"\" Gets the seed. Returns: block", "self.sBox.encode(plain) decoded = self.sBox.decode(encoded) if (plain == encoded): encodedMatches += 1 if (plain", "self.encodeMap[index] = i for i in range(256*8): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain:", "for i in range(length): if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(decodedMatches ==", "pSeed) for i in range(256): self.seed[i] = plain[i] ^ self.seed[i] if (self.seed[i] ==", "numbers. 
Parameters: encoded: block of encoded numbers seed: seed Returns: block of decoded", "range(8): if ((encoded[i]) & (1<<b)): index = self.decodeMap[indexVar+b]-seed if (index < 0): index", "1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw =", "0 | return[i] < 256 | **Modifies:** | self.seed[i] \"\"\" pSeed = 0", "in range(256*8): emptyCounter = 0 maxEmpty = 256*8-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter", "in range(2048): self.pw.append(randint(0, 255)) self.pBox = PBox(self.pw) def tearDown(self): self.pw = None self.pBox", "for i in range(256): if (plain[i] == encoded[i]): encodedMatches += 1 if (plain[i]", "self.buffer = encoded return returnvalue def close(self): return bytearray() class SBox: \"\"\" SBox", "encoded numbers. Parameters: encoded: block of encoded numbers seed: seed Returns: block of", "**Post:** | len(return) == 256 \"\"\" encoded = bytearray(256) for i in range(256):", "for SBox.encode() to improve performance encoded = self.pBox.encode(encoded, pSeed) return encoded def decodeRound(self,", "#TODO change to bytearray class SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i", "improve performance encoded = self.pBox.encode(encoded, pSeed) return encoded def decodeRound(self, encoded: bytearray, round:", "def encode(self, plain: bytearray): returnvalue = bytearray() if self.buffer is not None: plain", "pw: str): password = bytearray() for c in pw: password.append(ord(c)) index = 0", "bytearray(2048) for i in range(2048): ppw[i] = pw[8*256+i] self.pBox: PBox = PBox(ppw) def", "block of plain numbers. Parameters: plain: block of plain numbers seed: seed Returns:", "index += 2048 index8 = int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8)) return decoded class SPBox:", "numbers. 
Parameters: plain: block of plain numbers Returns: block of encoded numbers |", "may be edited by functions, no deepcopy needed #TODO change to bytearray class", "= None def test_simple(self): decodedMatches = 0 encodedMatches = 0 for i in", "\"\"\" decoded = bytearray(256) for i in range(256): indexVar = i*8 for b", "decoded[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return decoded def", "return < 256 \"\"\" return self.decodeMap[encoded] class PBox: \"\"\" PBox is a transposition", "= bytearray() for i in range(2048): self.pw.append(randint(0, 255)) self.pBox = PBox(self.pw) def tearDown(self):", "256: self.buffer.append(randint(0, 255)) return self.encode(bytearray()) class Decoder: def __init__(self, pw: str): password =", "i for i in range(256): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: int) ->", "if (plain[i] == encoded[i]): encodedMatches += 1 if (plain[i] == decoded[i]): decodedMatches +=", "1 if (emptyCounter < targetEmpty): index = (index+1)%256 self.encodeMap[index] = i for i", "def encode(self, plain: bytearray) -> bytearray: \"\"\" Encodes a block of plain numbers.", "-> bytearray: \"\"\" Decodes a block of encoded numbers. Parameters: encoded: block of", "parameters may be edited by functions, no deepcopy needed #TODO change to bytearray", "setSeed(self, seed: bytearray): \"\"\" Sets the seed. 
Parameters: seed: block of seed numbers", "= True while len(plain) >= 256: ba = bytearray() for i in range(256):", "pw: bytearray, seed: bytearray = None): self.sBoxes: List[SBox] = [None]*8 if (seed is", "range(7): encoded = self.encodeRound(encoded, i+1, pSeed) for i in range(256): self.seed[i] = plain[i]", "self.spBox = SPBox(password) self.buffer = None self.seeded = False def encode(self, plain: bytearray):", "None self.sBox = None def test_simple(self): decodedMatches = 0 encodedMatches = 0 for", "block of seed numbers | **Pre:** | len(seed) == 256 | seed[i] >=", "\"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*256 self.decodeMap: List[int] = [-1]*256", "the seed. Returns: block of seed numbers | **Post:** | len(return) == 256", "class SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(4096): self.pw.append(randint(0, 255))", "encoded = bytearray(256) for i in range(256): seedAtI = self.seed[i] encoded[i] = plain[i]", "self.assertTrue(decodedMatches == length) # TODO encodeMatches self.assertTrue(seedMatches < 256/10) # TODO encode 2nd", "edited by functions, no deepcopy needed #TODO change to bytearray class SBoxUnitTest(unittest.TestCase): def", "if (self.encodeMap[index] == -1): emptyCounter += 1 if (emptyCounter < targetEmpty): index =", ">= 0 | seed < 256 | **Post:** | len(return) == 256 |", "i in range(256): self.assertTrue(self.spBox.seed[i] != 0) seed2 = self.spBox.getSeed() self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded)", "self.buffer = plain return returnvalue def close(self): while len(self.buffer) < 256: self.buffer.append(randint(0, 255))", "seed for PBox Returns: block of decoded numbers | **Pre:** | len(encoded) ==", "i in range(256): indexVar = i*8+seed for b in range(8): if ((plain[i]) &", "& (1<<j)) != 0): decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]] # replacement for SBox.decode() to", "= decoded[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI return decoded 
def encode(self, plain: bytearray) ->", "for permutation Parameters: pw: password seed: seed | **Pre:** | len(pw) == 4096", "return decoded def encode(self, plain: bytearray) -> bytearray: \"\"\" Encodes a block of", "1 self.spBox = SPBox(password) self.buffer = None self.seeded = False def encode(self, plain:", "class SPBox: \"\"\" SPBox is a substitution-permutation network. Attributes: sBoxes: list of SBoxes", "index = 0 for i in range(256): emptyCounter = 0 maxEmpty = 256-i", "\"\"\" def __init__(self, pw: bytearray, seed: bytearray = None): self.sBoxes: List[SBox] = [None]*8", "self.buffer.append(randint(0, 255)) return self.encode(bytearray()) class Decoder: def __init__(self, pw: str): password = bytearray()", "Encoder: def __init__(self, pw: str): password = bytearray() for c in pw: password.append(ord(c))", "\"\"\" decoded = self.pBox.decode(encoded, pSeed) for i in range(256): seedAtI = self.seed[i] for", "for i in range(256): indexVar = i*8 for b in range(8): if ((encoded[i])", "**Post:** | len(return) == 256 | **Modifies:** | self.seed[i] \"\"\" pSeed = 0", "bytearray() for i in range(2048): self.pw.append(randint(0, 255)) self.pBox = PBox(self.pw) def tearDown(self): self.pw", ">= 0 | self.encodeMap[i] < 256 | len(self.decodeMap) == 256 | self.decodeMap[i] >=", "for b in range(8): if ((plain[i]) & (1<<b)): index = self.encodeMap[(b+indexVar)%2048] index8 =", "decoded def getSeed(self) -> bytearray: \"\"\" Gets the seed. Returns: block of seed", "returnvalue = bytearray() if self.buffer is not None: plain = self.buffer+plain self.buffer =", "decode(self, encoded: int) -> int: \"\"\" Decodes a single encoded number. 
Parameters: encoded:", "| **Pre:** | len(pw) == 256 | **Post:** | len(self.encodeMap) == 256 |", "numbers round: iteration of decode pSeed: seed for PBox Returns: block of decoded", "len(return) == 256 | return[i] >= 0 | return[i] < 256 | **Modifies:**", ">= 0 | self.decodeMap[i] < 256 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int]", "| **Pre:** | len(pw) == 4096 | len(seed) == 256 | seed[i] >=", "< 8 | pSeed >= 0 | pSeed < 256 | **Post:** |", "def decode(self, encoded: bytearray, seed: int) -> List[int]: \"\"\" Decodes a block of", "for i in range(256): spw[i] = pw[s*256+i] self.sBoxes[s] = SBox(spw) ppw = bytearray(2048)", "\"\"\" Decodes a single encoded number. Parameters: encoded: encoded number Returns: decoded number", "range(256): ba.append(encoded.pop(0)) if (self.seeded): decoded = self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded = True", "plain: block of plain numbers Returns: block of encoded numbers | **Pre:** |", "a substitution-permutation network. Attributes: sBoxes: list of SBoxes used for substitution seed: seed", "in range(4096): self.pw.append(randint(0, 255)) self.spBox = SPBox(self.pw) def tearDown(self): self.pw = None self.spBox", "**Post:** | len(self.sBoxes) == 8 | len(self.seed) == 256 | self.seed[i] >= 1", "= self.spBox.getSeed() for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) encoded = self.spBox.encode(plain) for", "for i in range(256): self.pw.append(randint(0, 255)) self.sBox = SBox(self.pw) def tearDown(self): self.pw =", "| len(self.seed) == 256 | self.seed[i] >= 1 \"\"\" def __init__(self, pw: bytearray,", "| self.seed[i] >= 1 \"\"\" def __init__(self, pw: bytearray, seed: bytearray = None):", "encoded = self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain) > 0: self.buffer = plain return returnvalue", "return decoded def getSeed(self) -> bytearray: \"\"\" Gets the seed. 
Returns: block of", "of encoded numbers seed: seed Returns: block of decoded numbers | **Pre:** |", "== 4096 | len(seed) == 256 | seed[i] >= 1 | **Post:** |", "= bytearray() for i in range(256): ba.append(encoded.pop(0)) if (self.seeded): decoded = self.spBox.decode(ba) returnvalue.extend(decoded)", "decode(self, encoded: bytearray): returnvalue = bytearray() if self.buffer is not None: encoded =", "self.sBox.decode(encoded) if (plain == encoded): encodedMatches += 1 if (plain == decoded): decodedMatches", "= None): self.sBoxes: List[SBox] = [None]*8 if (seed is None): seed = bytearray(256)", "self.seed: bytearray = seed for s in range(8): spw = bytearray(256) for i", "returnvalue.extend(ba) self.seeded = True while len(plain) >= 256: ba = bytearray() for i", "| **Post:** | len(self.encodeMap) == 256 | self.encodeMap[i] >= 0 | self.encodeMap[i] <", "is a substitution-permutation network. Attributes: sBoxes: list of SBoxes used for substitution seed:", "-> bytearray: \"\"\" Gets the seed. Returns: block of seed numbers | **Post:**", "| pSeed < 256 | **Post:** | len(return) == 256 \"\"\" encoded =", "bytearray() class SBox: \"\"\" SBox is a substitution cipher. 
Attributes: encodeMap: lookuptable used", "decoded = self.sBox.decode(encoded) if (plain == encoded): encodedMatches += 1 if (plain ==", "== 256) class SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(4096):", "of decoded numbers | **Pre:** | len(encoded) == 256 | encoded[i] >= 0", "bytearray, round: int, pSeed: int) -> bytearray: \"\"\" Decodes a block of encoded", "setUp(self): self.pw = bytearray() for i in range(256): self.pw.append(randint(0, 255)) self.sBox = SBox(self.pw)", "== 8 | len(self.seed) == 256 | self.seed[i] >= 1 \"\"\" def __init__(self,", "255) self.seed: bytearray = seed for s in range(8): spw = bytearray(256) for", "used for permutation Parameters: pw: password seed: seed | **Pre:** | len(pw) ==", "self.pBox = None def test_simple(self): plain = bytearray() for i in range(256): plain.append(randint(0,", "self.encodeMap[index] = i for i in range(256): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain:", "= 0 maxEmpty = 256-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if", "encoded number Returns: decoded number | **Pre:** | encoded >= 0 | encoded", "< 2048 | len(self.decodeMap) == 2048 | self.decodeMap[i] >= 0 | self.decodeMap[i] <", "for c in pw: password.append(ord(c)) index = 0 while len(password) < 4096: password.append(ord(pw[index%len(pw)]))", "int(index/8) encoded[index8] = encoded[index8]+(1<<(index%8)) return encoded def decode(self, encoded: bytearray, seed: int) ->", "256 | encoded[i] >= 0 | encoded[i] < 256 | **Post:** | len(return)", "| len(return) == 256 | return[i] >= 0 | return[i] < 256 |", "[-1]*256 index = 0 for i in range(256): emptyCounter = 0 maxEmpty =", "i*8 for b in range(8): if ((encoded[i]) & (1<<b)): index = self.decodeMap[indexVar+b]-seed if", "**Pre:** | len(pw) == 256 | **Post:** | len(self.encodeMap) == 256 | self.encodeMap[i]", "in range(256): pSeed = (pSeed+self.seed[i])%256 encoded = self.encodeRound(plain, 0, pSeed) for i in", "= 
[None]*8 if (seed is None): seed = bytearray(256) for i in range(256):", "of plain numbers. Parameters: plain: block of plain numbers Returns: block of encoded", "= self.pBox.decode(encoded, pSeed) for i in range(256): seedAtI = self.seed[i] for invertedJ in", "| **Pre:** | encoded >= 0 | encoded < 256 | **Post:** |", "Decodes a block of encoded numbers. Parameters: encoded: block of encoded numbers round:", "int: \"\"\" Decodes a single encoded number. Parameters: encoded: encoded number Returns: decoded", "pSeed = (pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded, 7, pSeed) for invertedI in range(7): i", "seed[i] >= 1 | **Post:** | len(self.sBoxes) == 8 | len(self.seed) == 256", "encoded >= 0 | encoded < 256 | **Post:** | return >= 0", "1 \"\"\" def __init__(self, pw: bytearray, seed: bytearray = None): self.sBoxes: List[SBox] =", "a block of encoded numbers. Parameters: encoded: block of encoded numbers seed: seed", "pw[8*256+i] self.pBox: PBox = PBox(ppw) def encodeRound(self, plain: bytearray, round: int, pSeed: int)", "1 if (plain == decoded): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches ==", "seed Returns: block of encoded numbers | **Pre:** | len(plain) == 256 |", "= bytearray() for c in pw: password.append(ord(c)) index = 0 while len(password) <", "def getSeed(self) -> bytearray: \"\"\" Gets the seed. 
Returns: block of seed numbers", "encoded = self.encodeRound(encoded, i+1, pSeed) for i in range(256): self.seed[i] = plain[i] ^", "bytearray(256) for i in range(256): seedAtI = self.seed[i] encoded[i] = plain[i] ^ self.sBoxes[round].encodeMap[i]", "for i in range(256): seed[i] = randint(1, 255) self.seed: bytearray = seed for", "== 256 | self.seed[i] >= 1 \"\"\" def __init__(self, pw: bytearray, seed: bytearray", "255)) self.sBox = SBox(self.pw) def tearDown(self): self.pw = None self.sBox = None def", "setUp(self): self.pw = bytearray() for i in range(2048): self.pw.append(randint(0, 255)) self.pBox = PBox(self.pw)", "| len(seed) == 256 | seed[i] >= 1 | **Modifies:** | self.seed[i] \"\"\"", "None while len(encoded) >= 256: ba = bytearray() for i in range(256): ba.append(encoded.pop(0))", "| **Post:** | len(return) == 256 \"\"\" decoded = self.pBox.decode(encoded, pSeed) for i", "i in range(256): ba.append(encoded.pop(0)) if (self.seeded): decoded = self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded", "len(plain) == 256 | round >= 0 | round < 8 | pSeed", "0 for i in range(256): if (seed[i] == seed2[i]): seedMatches += 1 for", "2048 | **Post:** | len(self.encodeMap) == 2048 | self.encodeMap[i] >= 0 | self.encodeMap[i]", "self.pw = bytearray() for i in range(2048): self.pw.append(randint(0, 255)) self.pBox = PBox(self.pw) def", "**Post:** | len(self.encodeMap) == 2048 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 2048", "self.spBox.getSeed() for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) encoded = self.spBox.encode(plain) for i", "decoded = self.pBox.decode(encoded, seed) decodedMatches = 0 encodedMatches = 0 for i in", "returnvalue def close(self): return bytearray() class SBox: \"\"\" SBox is a substitution cipher.", "decoded[i]): decodedMatches += 1 self.assertTrue(decodedMatches == length) # TODO encodeMatches self.assertTrue(seedMatches < 256/10)", "class Decoder: def __init__(self, pw: str): password = 
bytearray() for c in pw:", "cipher. Attributes: encodeMap: lookuptable used to encode data decodeMap: lookuptable used to decode", "__init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*(256*8) self.decodeMap: List[int] = [-1]*(256*8) index =", "Returns: block of decoded numbers | **Pre:** | len(encoded) == 256 | round", "\"\"\" seed = bytearray(256) for i in range(256): seed[i] = self.seed[i] return seed", "in range(256): seedAtI = self.seed[i] for invertedJ in range(8): j = 8-1-invertedJ if", "numbers | **Post:** | len(return) == 256 | return[i] >= 1 \"\"\" seed", "encode pSeed: seed for PBox Returns: block of encoded numbers | **Pre:** |", "0 | self.encodeMap[i] < 2048 | len(self.decodeMap) == 2048 | self.decodeMap[i] >= 0", "= bytearray() for i in range(256): plain.append(randint(0, 255)) length = len(plain) seed =", "| return >= 0 | return < 256 \"\"\" return self.encodeMap[plain] def decode(self,", "== length) # TODO encodeMatches self.assertTrue(seedMatches < 256/10) # TODO encode 2nd batch#plain", "block of encoded numbers Returns: block of decoded numbers | **Pre:** | len(encoded)", "| encoded[i] < 256 | **Post:** | len(return) == 256 | return[i] >=", ">= 0 | encoded[i] < 256 | **Post:** | len(return) == 256 |", "^ self.sBoxes[round].encodeMap[i] ^ seedAtI for j in range(8): if ((seedAtI & (1<<j)) !=", "for substitution seed: seed pBox: PBox used for permutation Parameters: pw: password seed:", "len(self.seed) == 256 | self.seed[i] >= 1 \"\"\" def __init__(self, pw: bytearray, seed:", "^ self.sBoxes[round].encodeMap[i] ^ seedAtI return decoded def encode(self, plain: bytearray) -> bytearray: \"\"\"", "while len(password) < 4096: password.append(ord(pw[index%len(pw)])) index += 1 self.spBox = SPBox(password) self.buffer =", "in range(256): if (seed[i] == seed2[i]): seedMatches += 1 for i in range(length):", "bytearray, seed: int) -> List[int]: \"\"\" Decodes a block of encoded numbers. 
Parameters:", "| self.decodeMap[i] >= 0 | self.decodeMap[i] < 2048 \"\"\" def __init__(self, pw: bytearray):", "for j in range(8): if ((seedAtI & (1<<j)) != 0): encoded[i] = self.sBoxes[j].encodeMap[", "bytearray): self.encodeMap: List[int] = [-1]*256 self.decodeMap: List[int] = [-1]*256 index = 0 for", "< 256 \"\"\" encoded = bytearray(256) for i in range(256): indexVar = i*8+seed", "password = bytearray() for c in pw: password.append(ord(c)) index = 0 while len(password)", "| **Modifies:** | self.seed[i] \"\"\" pSeed = 0 for i in range(256): pSeed", "substitution cipher. Attributes: encodeMap: lookuptable used to encode data decodeMap: lookuptable used to", "1 return encoded def decode(self, encoded: bytearray) -> bytearray: \"\"\" Decodes a block", "pSeed: seed for PBox Returns: block of decoded numbers | **Pre:** | len(encoded)", "def test_simple(self): plain = bytearray() for i in range(256): plain.append(randint(0, 255)) length =", "PBox(self.pw) def tearDown(self): self.pw = None self.pBox = None def test_simple(self): plain =", "encodedMatches += 1 if (plain == decoded): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10)", "| **Post:** | len(return) == 256 | **Modifies:** | self.seed[i] \"\"\" pSeed =", "self.decodeRound(decoded, i, pSeed) for i in range(256): self.seed[i] = decoded[i] ^ self.seed[i] if", "numbers. Parameters: encoded: block of encoded numbers round: iteration of decode pSeed: seed", "\"\"\" Encodes a block of plain numbers. 
Parameters: plain: block of plain numbers", "in range(256): seed[i] = randint(1, 255) self.seed: bytearray = seed for s in", "(index < 0): index += 2048 index8 = int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8)) return", "= 0 maxEmpty = 256*8-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if", "round: iteration of encode pSeed: seed for PBox Returns: block of encoded numbers", "pw: bytearray): self.encodeMap: List[int] = [-1]*(256*8) self.decodeMap: List[int] = [-1]*(256*8) index = 0", "| len(return) == 256 \"\"\" decoded = self.pBox.decode(encoded, pSeed) for i in range(256):", "= encoded return returnvalue def close(self): return bytearray() class SBox: \"\"\" SBox is", "return[i] < 256 \"\"\" decoded = bytearray(256) for i in range(256): indexVar =", "256*8-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index] == -1): emptyCounter", "self.assertTrue(self.spBox.seed[i] != 0) seed2 = self.spBox.getSeed() self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded) decodedMatches = 0", "of decoded numbers | **Pre:** | len(encoded) == 256 | round >= 0", "ppw[i] = pw[8*256+i] self.pBox: PBox = PBox(ppw) def encodeRound(self, plain: bytearray, round: int,", "Parameters: pw: password | **Pre:** | len(pw) == 2048 | **Post:** | len(self.encodeMap)", "def encodeRound(self, plain: bytearray, round: int, pSeed: int) -> bytearray: \"\"\" Encodes a", "| **Pre:** | len(encoded) == 256 | round >= 0 | round <", "plain = i encoded = self.sBox.encode(plain) decoded = self.sBox.decode(encoded) if (plain == encoded):", "= 256*8-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index] == -1):", "= i def encode(self, plain: bytearray, seed: int) -> bytearray: \"\"\" Encodes a", "== decoded[i]): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class SPBoxUnitTest(unittest.TestCase):", "if self.buffer is not None: 
plain = self.buffer+plain self.buffer = None if (not", "+= 1 if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches", "= bytearray() if self.buffer is not None: encoded = self.buffer+encoded self.buffer = None", "length = len(plain) seed = self.spBox.getSeed() for i in range(256): self.assertTrue(self.spBox.seed[i] != 0)", "Decodes a single encoded number. Parameters: encoded: encoded number Returns: decoded number |", "False def decode(self, encoded: bytearray): returnvalue = bytearray() if self.buffer is not None:", "| return[i] < 256 \"\"\" decoded = bytearray(256) for i in range(256): indexVar", "(1<<b)): index = self.encodeMap[(b+indexVar)%2048] index8 = int(index/8) encoded[index8] = encoded[index8]+(1<<(index%8)) return encoded def", "self.encode(bytearray()) class Decoder: def __init__(self, pw: str): password = bytearray() for c in", "4096 | len(seed) == 256 | seed[i] >= 1 | **Post:** | len(self.sBoxes)", "pSeed: int) -> bytearray: \"\"\" Decodes a block of encoded numbers. Parameters: encoded:", "self.encodeRound(encoded, i+1, pSeed) for i in range(256): self.seed[i] = plain[i] ^ self.seed[i] if", "for i in range(256): pSeed = (pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded, 7, pSeed) for", "of encoded numbers. 
Parameters: encoded: block of encoded numbers Returns: block of decoded", "| **Post:** | len(self.sBoxes) == 8 | len(self.seed) == 256 | self.seed[i] >=", "8 | len(self.seed) == 256 | self.seed[i] >= 1 \"\"\" def __init__(self, pw:", "range(2048): ppw[i] = pw[8*256+i] self.pBox: PBox = PBox(ppw) def encodeRound(self, plain: bytearray, round:", "return returnvalue def close(self): while len(self.buffer) < 256: self.buffer.append(randint(0, 255)) return self.encode(bytearray()) class", "= int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8)) return decoded class SPBox: \"\"\" SPBox is a", "# TODO change general parameter policy: all parameters may be edited by functions,", "encoded: encoded number Returns: decoded number | **Pre:** | encoded >= 0 |", "Tuple, List class Encoder: def __init__(self, pw: str): password = bytearray() for c", "self.sBoxes[round].encodeMap[i] ^ seedAtI return decoded def encode(self, plain: bytearray) -> bytearray: \"\"\" Encodes", "PBox Returns: block of decoded numbers | **Pre:** | len(encoded) == 256 |", "if (plain == encoded): encodedMatches += 1 if (plain == decoded): decodedMatches +=", "index8 = int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8)) return decoded class SPBox: \"\"\" SPBox is", "self.seed[i] \"\"\" for i in range(256): self.seed[i] = seed[i] # TODO change general", "int) -> int: \"\"\" Encodes a single plain number. Parameters: plain: plain number", "for i in range(256): ba.append(encoded.pop(0)) if (self.seeded): decoded = self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba)", "a block of encoded numbers. Parameters: encoded: block of encoded numbers round: iteration", "plain: int) -> int: \"\"\" Encodes a single plain number. 
Parameters: plain: plain", "range(8): spw = bytearray(256) for i in range(256): spw[i] = pw[s*256+i] self.sBoxes[s] =", "self.decodeRound(encoded, 7, pSeed) for invertedI in range(7): i = 6-invertedI decoded = self.decodeRound(decoded,", "= None self.sBox = None def test_simple(self): decodedMatches = 0 encodedMatches = 0", "= 1 return encoded def decode(self, encoded: bytearray) -> bytearray: \"\"\" Decodes a", "pw[s*256+i] self.sBoxes[s] = SBox(spw) ppw = bytearray(2048) for i in range(2048): ppw[i] =", "for i in range(2048): ppw[i] = pw[8*256+i] self.pBox: PBox = PBox(ppw) def encodeRound(self,", "**Post:** | return >= 0 | return < 256 \"\"\" return self.decodeMap[encoded] class", "class PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(2048): self.pw.append(randint(0, 255))", "plain = self.buffer+plain self.buffer = None if (not self.seeded): ba = self.spBox.getSeed() returnvalue.extend(ba)", "== 0): self.seed[i] = 1 return decoded def getSeed(self) -> bytearray: \"\"\" Gets", "pw: password seed: seed | **Pre:** | len(pw) == 4096 | len(seed) ==", "== 256 | seed >= 0 | seed < 256 | **Post:** |", "0 for i in range(256): emptyCounter = 0 maxEmpty = 256-i targetEmpty =", "List[int]: \"\"\" Decodes a block of encoded numbers. Parameters: encoded: block of encoded", "i for i in range(256*8): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: bytearray, seed:", "numbers. Parameters: encoded: block of encoded numbers Returns: block of decoded numbers |", "self.spBox.setSeed(ba) self.seeded = True if len(encoded) > 0: self.buffer = encoded return returnvalue", "in range(256): self.seed[i] = decoded[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] =", "0 | return < 256 \"\"\" return self.encodeMap[plain] def decode(self, encoded: int) ->", "self.encodeMap[i] >= 0 | self.encodeMap[i] < 2048 | len(self.decodeMap) == 2048 | self.decodeMap[i]", "plain numbers. 
Parameters: plain: block of plain numbers seed: seed Returns: block of", "range(256*8): emptyCounter = 0 maxEmpty = 256*8-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter <", "= 8-1-invertedJ if ((seedAtI & (1<<j)) != 0): decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]] #", "in range(256): plain = i encoded = self.sBox.encode(plain) decoded = self.sBox.decode(encoded) if (plain", "for i in range(256): self.seed[i] = seed[i] # TODO change general parameter policy:", "if len(encoded) > 0: self.buffer = encoded return returnvalue def close(self): return bytearray()", "Encodes a block of plain numbers. Parameters: plain: block of plain numbers round:", "range(256): seed[i] = randint(1, 255) self.seed: bytearray = seed for s in range(8):", "PBox is a transposition cipher. Attributes: encodeMap: lookuptable used to encode data decodeMap:", "encoded numbers | **Pre:** | len(plain) == 256 | seed >= 0 |", "seed def setSeed(self, seed: bytearray): \"\"\" Sets the seed. Parameters: seed: block of", "plain numbers. 
Parameters: plain: block of plain numbers round: iteration of encode pSeed:", "256 | seed >= 0 | seed < 256 | **Post:** | len(return)", "numbers Returns: block of encoded numbers | **Pre:** | len(plain) == 256 |", "^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return encoded def decode(self,", "= [-1]*(256*8) self.decodeMap: List[int] = [-1]*(256*8) index = 0 for i in range(256*8):", "= self.decodeMap[indexVar+b]-seed if (index < 0): index += 2048 index8 = int(index/8) decoded[index8]", "i = 6-invertedI decoded = self.decodeRound(decoded, i, pSeed) for i in range(256): self.seed[i]", "decoded = self.spBox.decode(encoded) decodedMatches = 0 seedMatches = 0 for i in range(256):", "self.pBox: PBox = PBox(ppw) def encodeRound(self, plain: bytearray, round: int, pSeed: int) ->", "if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256)", "self.buffer is not None: plain = self.buffer+plain self.buffer = None if (not self.seeded):", "return self.encode(bytearray()) class Decoder: def __init__(self, pw: str): password = bytearray() for c", "| **Post:** | return >= 0 | return < 256 \"\"\" return self.encodeMap[plain]", "plain number Returns: encoded number | **Pre:** | plain >= 0 | plain", "encoded numbers | **Pre:** | len(plain) == 256 | round >= 0 |", "plain.append(randint(0, 255)) length = len(plain) seed = self.spBox.getSeed() for i in range(256): self.assertTrue(self.spBox.seed[i]", "for PBox Returns: block of encoded numbers | **Pre:** | len(plain) == 256", "\"\"\" encoded = bytearray(256) for i in range(256): seedAtI = self.seed[i] encoded[i] =", "= len(plain) seed = self.spBox.getSeed() for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) encoded", "== decoded[i]): decodedMatches += 1 self.assertTrue(decodedMatches == length) # TODO encodeMatches self.assertTrue(seedMatches <", "decoded number | **Pre:** | encoded >= 0 | encoded < 256 |", "encoded: block of encoded numbers 
seed: seed Returns: block of decoded numbers |", "= i*8+seed for b in range(8): if ((plain[i]) & (1<<b)): index = self.encodeMap[(b+indexVar)%2048]", "numbers | **Pre:** | len(seed) == 256 | seed[i] >= 1 | **Modifies:**", "return[i] < 256 \"\"\" encoded = bytearray(256) for i in range(256): indexVar =", "| plain >= 0 | plain < 256 | **Post:** | return >=", "general parameter policy: all parameters may be edited by functions, no deepcopy needed", "= self.sBoxes[j].decodeMap[ decoded[i]] # replacement for SBox.decode() to improve performance decoded[i] = decoded[i]", "in range(256): plain.append(randint(0, 255)) length = len(plain) seed = self.spBox.getSeed() for i in", "from random import randint import unittest from typing import Dict, Tuple, List class", "bytearray = seed for s in range(8): spw = bytearray(256) for i in", "(emptyCounter < targetEmpty): if (self.encodeMap[index] == -1): emptyCounter += 1 if (emptyCounter <", ">= 1 | **Post:** | len(self.sBoxes) == 8 | len(self.seed) == 256 |", "(self.seed[i] == 0): self.seed[i] = 1 return encoded def decode(self, encoded: bytearray) ->", "\"\"\" SBox is a substitution cipher. Attributes: encodeMap: lookuptable used to encode data", "= 0 encodedMatches = 0 for i in range(256): if (plain[i] == encoded[i]):", "= self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded = True if len(encoded) > 0: self.buffer", "round < 8 | pSeed >= 0 | pSeed < 256 | **Post:**", "is a substitution cipher. Attributes: encodeMap: lookuptable used to encode data decodeMap: lookuptable", "import unittest from typing import Dict, Tuple, List class Encoder: def __init__(self, pw:", "number. Parameters: encoded: encoded number Returns: decoded number | **Pre:** | encoded >=", "== 256 \"\"\" encoded = bytearray(256) for i in range(256): seedAtI = self.seed[i]", "\"\"\" return self.decodeMap[encoded] class PBox: \"\"\" PBox is a transposition cipher. 
Attributes: encodeMap:", "by functions, no deepcopy needed #TODO change to bytearray class SBoxUnitTest(unittest.TestCase): def setUp(self):", "for i in range(256): ba.append(plain.pop(0)) encoded = self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain) > 0:", "def tearDown(self): self.pw = None self.spBox = None def test_simple(self): plain = bytearray()", "\"\"\" Decodes a block of encoded numbers. Parameters: encoded: block of encoded numbers", "range(256): self.seed[i] = seed[i] # TODO change general parameter policy: all parameters may", "== 2048 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 2048 \"\"\" def __init__(self,", "Parameters: encoded: block of encoded numbers seed: seed Returns: block of decoded numbers", "range(256): plain.append(randint(0, 255)) length = len(plain) seed = self.spBox.getSeed() for i in range(256):", "encodedMatches = 0 for i in range(256): if (plain[i] == encoded[i]): encodedMatches +=", "256 \"\"\" return self.decodeMap[encoded] class PBox: \"\"\" PBox is a transposition cipher. Attributes:", "((seedAtI & (1<<j)) != 0): encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]] # replacement for SBox.encode()", "False def encode(self, plain: bytearray): returnvalue = bytearray() if self.buffer is not None:", "i in range(256): indexVar = i*8 for b in range(8): if ((encoded[i]) &", "= self.seed[i] return seed def setSeed(self, seed: bytearray): \"\"\" Sets the seed. 
Parameters:", "| len(encoded) == 256 | seed >= 0 | seed < 256 |", "for i in range(256*8): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: bytearray, seed: int)", "self.sBoxes[s] = SBox(spw) ppw = bytearray(2048) for i in range(2048): ppw[i] = pw[8*256+i]", "PBox Returns: block of encoded numbers | **Pre:** | len(plain) == 256 |", "| encoded[i] >= 0 | encoded[i] < 256 | **Post:** | len(return) ==", "= SBox(self.pw) def tearDown(self): self.pw = None self.sBox = None def test_simple(self): decodedMatches", "def setUp(self): self.pw = bytearray() for i in range(2048): self.pw.append(randint(0, 255)) self.pBox =", "range(256): indexVar = i*8+seed for b in range(8): if ((plain[i]) & (1<<b)): index", "block of encoded numbers | **Pre:** | len(plain) == 256 | round >=", "len(plain) == 256 | **Post:** | len(return) == 256 | **Modifies:** | self.seed[i]", "range(256*8): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: bytearray, seed: int) -> bytearray: \"\"\"", "plain[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return encoded def", "= pw[8*256+i] self.pBox: PBox = PBox(ppw) def encodeRound(self, plain: bytearray, round: int, pSeed:", "numbers | **Pre:** | len(encoded) == 256 | encoded[i] >= 0 | encoded[i]", "import Dict, Tuple, List class Encoder: def __init__(self, pw: str): password = bytearray()", "256 | **Modifies:** | self.seed[i] \"\"\" pSeed = 0 for i in range(256):", "= i encoded = self.sBox.encode(plain) decoded = self.sBox.decode(encoded) if (plain == encoded): encodedMatches", "plain numbers Returns: block of encoded numbers | **Pre:** | len(plain) == 256", "| self.encodeMap[i] >= 0 | self.encodeMap[i] < 2048 | len(self.decodeMap) == 2048 |", "encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]] # replacement for SBox.encode() to improve performance encoded =", "return encoded def decode(self, encoded: bytearray) -> bytearray: \"\"\" Decodes a block of", "encoded def decode(self, encoded: bytearray) -> bytearray: \"\"\" 
Decodes a block of encoded", "def setUp(self): self.pw = bytearray() for i in range(256): self.pw.append(randint(0, 255)) self.sBox =", "i in range(256): plain.append(randint(0, 255)) length = len(plain) seed = self.spBox.getSeed() for i", "self.pw.append(randint(0, 255)) self.pBox = PBox(self.pw) def tearDown(self): self.pw = None self.pBox = None", "no deepcopy needed #TODO change to bytearray class SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw =", "index = self.encodeMap[(b+indexVar)%2048] index8 = int(index/8) encoded[index8] = encoded[index8]+(1<<(index%8)) return encoded def decode(self,", "decoded numbers | **Pre:** | len(encoded) == 256 | round >= 0 |", "== encoded): encodedMatches += 1 if (plain == decoded): decodedMatches += 1 self.assertTrue(encodedMatches", "plain = bytearray() for i in range(256): plain.append(randint(0, 255)) length = len(plain) seed", "| return[i] < 256 \"\"\" encoded = bytearray(256) for i in range(256): indexVar", "single encoded number. Parameters: encoded: encoded number Returns: decoded number | **Pre:** |", "i def encode(self, plain: bytearray, seed: int) -> bytearray: \"\"\" Encodes a block", "plain.append(randint(0, 255)) for seed in range(256): encoded = self.pBox.encode(plain, seed) decoded = self.pBox.decode(encoded,", "encoded[i] < 256 | **Post:** | len(return) == 256 | return[i] >= 0", "decode(self, encoded: bytearray, seed: int) -> List[int]: \"\"\" Decodes a block of encoded", ">= 0 | pSeed < 256 | **Post:** | len(return) == 256 \"\"\"", "< 256 \"\"\" return self.decodeMap[encoded] class PBox: \"\"\" PBox is a transposition cipher.", "| return[i] >= 0 | return[i] < 256 \"\"\" encoded = bytearray(256) for", "self.sBoxes[j].decodeMap[ decoded[i]] # replacement for SBox.decode() to improve performance decoded[i] = decoded[i] ^", "self.pw = None self.pBox = None def test_simple(self): plain = bytearray() for i", "self.spBox.getSeed() returnvalue.extend(ba) self.seeded = True while len(plain) >= 256: ba = 
bytearray() for", "return seed def setSeed(self, seed: bytearray): \"\"\" Sets the seed. Parameters: seed: block", "used to decode data Parameters: pw: password | **Pre:** | len(pw) == 256", "**Pre:** | len(plain) == 256 | round >= 0 | round < 8", "self.spBox.getSeed() self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded) decodedMatches = 0 seedMatches = 0 for i", "^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return decoded def getSeed(self)", "int) -> bytearray: \"\"\" Encodes a block of plain numbers. Parameters: plain: block", "== 256 | seed[i] >= 1 | **Post:** | len(self.sBoxes) == 8 |", "256 | **Post:** | return >= 0 | return < 256 \"\"\" return", "block of plain numbers round: iteration of encode pSeed: seed for PBox Returns:", "block of encoded numbers | **Pre:** | len(plain) == 256 | seed >=", "<gh_stars>0 from random import randint import unittest from typing import Dict, Tuple, List", "range(256): spw[i] = pw[s*256+i] self.sBoxes[s] = SBox(spw) ppw = bytearray(2048) for i in", "encoded = bytearray(256) for i in range(256): indexVar = i*8+seed for b in", "& (1<<j)) != 0): encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]] # replacement for SBox.encode() to", "a block of plain numbers. Parameters: plain: block of plain numbers Returns: block", "SBox(self.pw) def tearDown(self): self.pw = None self.sBox = None def test_simple(self): decodedMatches =", "for i in range(256): seed[i] = self.seed[i] return seed def setSeed(self, seed: bytearray):", "0 seedMatches = 0 for i in range(256): if (seed[i] == seed2[i]): seedMatches", "for i in range(256): self.seed[i] = decoded[i] ^ self.seed[i] if (self.seed[i] == 0):", "all parameters may be edited by functions, no deepcopy needed #TODO change to", "if (seed[i] == seed2[i]): seedMatches += 1 for i in range(length): if (plain[i]", "int: \"\"\" Encodes a single plain number. 
Parameters: plain: plain number Returns: encoded", "= bytearray(256) for i in range(256): seed[i] = self.seed[i] return seed def setSeed(self,", "0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 encoded = self.encodeRound(plain, 0, pSeed)", "= None while len(encoded) >= 256: ba = bytearray() for i in range(256):", "seed >= 0 | seed < 256 | **Post:** | len(return) == 256", "len(self.encodeMap) == 256 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 256 | len(self.decodeMap)", "encoded numbers. Parameters: encoded: block of encoded numbers Returns: block of decoded numbers", "\"\"\" SPBox is a substitution-permutation network. Attributes: sBoxes: list of SBoxes used for", "| return < 256 \"\"\" return self.decodeMap[encoded] class PBox: \"\"\" PBox is a", "bytearray): \"\"\" Sets the seed. Parameters: seed: block of seed numbers | **Pre:**", "for i in range(7): encoded = self.encodeRound(encoded, i+1, pSeed) for i in range(256):", "0 | pSeed < 256 | **Post:** | len(return) == 256 \"\"\" decoded", "seed for PBox Returns: block of encoded numbers | **Pre:** | len(plain) ==", "self.decodeMap[encoded] class PBox: \"\"\" PBox is a transposition cipher. 
Attributes: encodeMap: lookuptable used", "^ seedAtI return decoded def encode(self, plain: bytearray) -> bytearray: \"\"\" Encodes a", "== decoded): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class PBoxUnitTest(unittest.TestCase):", "(emptyCounter < targetEmpty): index = (index+1)%(256*8) self.encodeMap[index] = i for i in range(256*8):", "| self.seed[i] \"\"\" for i in range(256): self.seed[i] = seed[i] # TODO change", "for i in range(256): emptyCounter = 0 maxEmpty = 256-i targetEmpty = 1+(pw[i]%maxEmpty)", "len(self.encodeMap) == 2048 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 2048 | len(self.decodeMap)", "1 for i in range(length): if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(decodedMatches", "i in range(256): if (plain[i] == encoded[i]): encodedMatches += 1 if (plain[i] ==", "!= 0): encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]] # replacement for SBox.encode() to improve performance", "single plain number. 
Parameters: plain: plain number Returns: encoded number | **Pre:** |", ">= 1 \"\"\" def __init__(self, pw: bytearray, seed: bytearray = None): self.sBoxes: List[SBox]", "permutation Parameters: pw: password seed: seed | **Pre:** | len(pw) == 4096 |", "def tearDown(self): self.pw = None self.pBox = None def test_simple(self): plain = bytearray()", "numbers | **Pre:** | len(plain) == 256 | seed >= 0 | seed", "Parameters: plain: block of plain numbers seed: seed Returns: block of encoded numbers", "Parameters: encoded: block of encoded numbers round: iteration of decode pSeed: seed for", "i+1, pSeed) for i in range(256): self.seed[i] = plain[i] ^ self.seed[i] if (self.seed[i]", "List[SBox] = [None]*8 if (seed is None): seed = bytearray(256) for i in", "0) seed2 = self.spBox.getSeed() self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded) decodedMatches = 0 seedMatches =", "< targetEmpty): index = (index+1)%256 self.encodeMap[index] = i for i in range(256): self.decodeMap[self.encodeMap[i]]", "= 0 for i in range(256): if (plain[i] == encoded[i]): encodedMatches += 1", "len(return) == 256 | return[i] >= 0 | return[i] < 256 \"\"\" encoded", "self.pw.append(randint(0, 255)) self.spBox = SPBox(self.pw) def tearDown(self): self.pw = None self.spBox = None", "change general parameter policy: all parameters may be edited by functions, no deepcopy", "self.encodeMap[i] < 2048 | len(self.decodeMap) == 2048 | self.decodeMap[i] >= 0 | self.decodeMap[i]", "for i in range(256): plain.append(randint(0, 255)) length = len(plain) seed = self.spBox.getSeed() for", "Sets the seed. 
Parameters: seed: block of seed numbers | **Pre:** | len(seed)", "= bytearray() if self.buffer is not None: plain = self.buffer+plain self.buffer = None", "SBox(spw) ppw = bytearray(2048) for i in range(2048): ppw[i] = pw[8*256+i] self.pBox: PBox", "255)) for seed in range(256): encoded = self.pBox.encode(plain, seed) decoded = self.pBox.decode(encoded, seed)", "1 | **Post:** | len(self.sBoxes) == 8 | len(self.seed) == 256 | self.seed[i]", "index = (index+1)%(256*8) self.encodeMap[index] = i for i in range(256*8): self.decodeMap[self.encodeMap[i]] = i", "1 if (emptyCounter < targetEmpty): index = (index+1)%(256*8) self.encodeMap[index] = i for i", "in range(256): plain.append(randint(0, 255)) for seed in range(256): encoded = self.pBox.encode(plain, seed) decoded", "seed: int) -> bytearray: \"\"\" Encodes a block of plain numbers. Parameters: plain:", "lookuptable used to encode data decodeMap: lookuptable used to decode data Parameters: pw:", "2048 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 2048 | len(self.decodeMap) == 2048", "= plain[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return encoded", "< 256 | **Post:** | len(return) == 256 \"\"\" encoded = bytearray(256) for", "performance encoded = self.pBox.encode(encoded, pSeed) return encoded def decodeRound(self, encoded: bytearray, round: int,", "bytearray) -> bytearray: \"\"\" Encodes a block of plain numbers. Parameters: plain: block", "encodedMatches = 0 for i in range(256): plain = i encoded = self.sBox.encode(plain)", "return[i] >= 0 | return[i] < 256 \"\"\" encoded = bytearray(256) for i", "-> List[int]: \"\"\" Decodes a block of encoded numbers. 
Parameters: encoded: block of", "of encode pSeed: seed for PBox Returns: block of encoded numbers | **Pre:**", "for invertedI in range(7): i = 6-invertedI decoded = self.decodeRound(decoded, i, pSeed) for", "encoded[index8]+(1<<(index%8)) return encoded def decode(self, encoded: bytearray, seed: int) -> List[int]: \"\"\" Decodes", "(plain == encoded): encodedMatches += 1 if (plain == decoded): decodedMatches += 1", "self.encodeMap: List[int] = [-1]*(256*8) self.decodeMap: List[int] = [-1]*(256*8) index = 0 for i", "network. Attributes: sBoxes: list of SBoxes used for substitution seed: seed pBox: PBox", "pSeed) for i in range(256): self.seed[i] = decoded[i] ^ self.seed[i] if (self.seed[i] ==", "\"\"\" return self.encodeMap[plain] def decode(self, encoded: int) -> int: \"\"\" Decodes a single", "spw[i] = pw[s*256+i] self.sBoxes[s] = SBox(spw) ppw = bytearray(2048) for i in range(2048):", "0 maxEmpty = 256-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index]", "bytearray() for i in range(256): plain.append(randint(0, 255)) length = len(plain) seed = self.spBox.getSeed()", "= self.encodeRound(plain, 0, pSeed) for i in range(7): encoded = self.encodeRound(encoded, i+1, pSeed)", "numbers. Parameters: plain: block of plain numbers round: iteration of encode pSeed: seed", "((seedAtI & (1<<j)) != 0): decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]] # replacement for SBox.decode()", "return[i] >= 0 | return[i] < 256 | **Modifies:** | self.seed[i] \"\"\" pSeed", "= pw[s*256+i] self.sBoxes[s] = SBox(spw) ppw = bytearray(2048) for i in range(2048): ppw[i]", "256: ba = bytearray() for i in range(256): ba.append(encoded.pop(0)) if (self.seeded): decoded =", "decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]] # replacement for SBox.decode() to improve performance decoded[i] =", "SBox: \"\"\" SBox is a substitution cipher. 
Attributes: encodeMap: lookuptable used to encode", "in range(256): pSeed = (pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded, 7, pSeed) for invertedI in", "decodedMatches = 0 seedMatches = 0 for i in range(256): if (seed[i] ==", "self.sBoxes[round].encodeMap[i] ^ seedAtI for j in range(8): if ((seedAtI & (1<<j)) != 0):", "def __init__(self, pw: bytearray, seed: bytearray = None): self.sBoxes: List[SBox] = [None]*8 if", "| return >= 0 | return < 256 \"\"\" return self.decodeMap[encoded] class PBox:", "| **Post:** | len(return) == 256 | return[i] >= 1 \"\"\" seed =", "seed2 = self.spBox.getSeed() self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded) decodedMatches = 0 seedMatches = 0", "self.seed[i] = 1 return decoded def getSeed(self) -> bytearray: \"\"\" Gets the seed.", "Dict, Tuple, List class Encoder: def __init__(self, pw: str): password = bytearray() for", "if len(plain) > 0: self.buffer = plain return returnvalue def close(self): while len(self.buffer)", "self.seed[i] = plain[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return", "+= 1 self.spBox = SPBox(password) self.buffer = None self.seeded = False def encode(self,", "None self.pBox = None def test_simple(self): plain = bytearray() for i in range(256):", "**Post:** | return >= 0 | return < 256 \"\"\" return self.encodeMap[plain] def", "| pSeed >= 0 | pSeed < 256 | **Post:** | len(return) ==", "decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class SPBoxUnitTest(unittest.TestCase): def setUp(self):", "[-1]*256 self.decodeMap: List[int] = [-1]*256 index = 0 for i in range(256): emptyCounter", "| len(self.sBoxes) == 8 | len(self.seed) == 256 | self.seed[i] >= 1 \"\"\"", "None def test_simple(self): decodedMatches = 0 encodedMatches = 0 for i in range(256):", "= [-1]*256 index = 0 for i in range(256): emptyCounter = 0 maxEmpty", "decoded class SPBox: \"\"\" SPBox is a substitution-permutation network. 
Attributes: sBoxes: list of", "seed pBox: PBox used for permutation Parameters: pw: password seed: seed | **Pre:**", "1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index] == -1): emptyCounter += 1 if", "< 256 | **Post:** | return >= 0 | return < 256 \"\"\"", "256 | self.seed[i] >= 1 \"\"\" def __init__(self, pw: bytearray, seed: bytearray =", "Returns: block of encoded numbers | **Pre:** | len(plain) == 256 | round", "if (self.seed[i] == 0): self.seed[i] = 1 return encoded def decode(self, encoded: bytearray)", "a single plain number. Parameters: plain: plain number Returns: encoded number | **Pre:**", "-> int: \"\"\" Decodes a single encoded number. Parameters: encoded: encoded number Returns:", "(plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class", "of decoded numbers | **Pre:** | len(encoded) == 256 | seed >= 0", "plain: bytearray, seed: int) -> bytearray: \"\"\" Encodes a block of plain numbers.", "\"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*(256*8) self.decodeMap: List[int] = [-1]*(256*8)", "encoded return returnvalue def close(self): return bytearray() class SBox: \"\"\" SBox is a", "self.spBox = None def test_simple(self): plain = bytearray() for i in range(256): plain.append(randint(0,", "pSeed >= 0 | pSeed < 256 | **Post:** | len(return) == 256", ">= 0 | return[i] < 256 \"\"\" encoded = bytearray(256) for i in", "self.pw = bytearray() for i in range(4096): self.pw.append(randint(0, 255)) self.spBox = SPBox(self.pw) def", "encoded = self.sBox.encode(plain) decoded = self.sBox.decode(encoded) if (plain == encoded): encodedMatches += 1", "(self.seed[i] == 0): self.seed[i] = 1 return decoded def getSeed(self) -> bytearray: \"\"\"", "range(2048): self.pw.append(randint(0, 255)) self.pBox = PBox(self.pw) def tearDown(self): self.pw = None self.pBox =", "(plain[i] == decoded[i]): decodedMatches += 1 
self.assertTrue(decodedMatches == length) # TODO encodeMatches self.assertTrue(seedMatches", "iteration of encode pSeed: seed for PBox Returns: block of encoded numbers |", "256 | seed[i] >= 1 | **Post:** | len(self.sBoxes) == 8 | len(self.seed)", "randint import unittest from typing import Dict, Tuple, List class Encoder: def __init__(self,", "block of plain numbers Returns: block of encoded numbers | **Pre:** | len(plain)", "= self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain) > 0: self.buffer = plain return returnvalue def", "seedMatches += 1 for i in range(length): if (plain[i] == decoded[i]): decodedMatches +=", "encoded < 256 | **Post:** | return >= 0 | return < 256", "self.assertTrue(self.spBox.seed[i] != 0) encoded = self.spBox.encode(plain) for i in range(256): self.assertTrue(self.spBox.seed[i] != 0)", "0: self.buffer = encoded return returnvalue def close(self): return bytearray() class SBox: \"\"\"", "len(self.sBoxes) == 8 | len(self.seed) == 256 | self.seed[i] >= 1 \"\"\" def", "seed) decodedMatches = 0 encodedMatches = 0 for i in range(256): if (plain[i]", "# TODO encodeMatches self.assertTrue(seedMatches < 256/10) # TODO encode 2nd batch#plain is edited", "0: self.buffer = plain return returnvalue def close(self): while len(self.buffer) < 256: self.buffer.append(randint(0,", "decode data Parameters: pw: password | **Pre:** | len(pw) == 2048 | **Post:**", "None self.seeded = False def decode(self, encoded: bytearray): returnvalue = bytearray() if self.buffer", "i in range(256): ba.append(plain.pop(0)) encoded = self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain) > 0: self.buffer", "of plain numbers seed: seed Returns: block of encoded numbers | **Pre:** |", "i in range(256): self.seed[i] = plain[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i]", "in range(256): seed[i] = self.seed[i] return seed def setSeed(self, seed: bytearray): \"\"\" Sets", "i in range(256): if (seed[i] == seed2[i]): seedMatches += 1 for i in", "to 
bytearray class SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(256):", "(self.encodeMap[index] == -1): emptyCounter += 1 if (emptyCounter < targetEmpty): index = (index+1)%(256*8)", "0 encodedMatches = 0 for i in range(256): plain = i encoded =", "self.pBox.decode(encoded, pSeed) for i in range(256): seedAtI = self.seed[i] for invertedJ in range(8):", "pSeed: seed for PBox Returns: block of encoded numbers | **Pre:** | len(plain)", "block of encoded numbers. Parameters: encoded: block of encoded numbers round: iteration of", "SPBox is a substitution-permutation network. Attributes: sBoxes: list of SBoxes used for substitution", "in range(256): self.pw.append(randint(0, 255)) self.sBox = SBox(self.pw) def tearDown(self): self.pw = None self.sBox", "self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: int) -> int: \"\"\" Encodes a single", "numbers. Parameters: plain: block of plain numbers seed: seed Returns: block of encoded", "bytearray: \"\"\" Decodes a block of encoded numbers. 
Parameters: encoded: block of encoded", "Parameters: pw: password | **Pre:** | len(pw) == 256 | **Post:** | len(self.encodeMap)", "def setUp(self): self.pw = bytearray() for i in range(4096): self.pw.append(randint(0, 255)) self.spBox =", "= (pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded, 7, pSeed) for invertedI in range(7): i =", "encoded: bytearray): returnvalue = bytearray() if self.buffer is not None: encoded = self.buffer+encoded", "c in pw: password.append(ord(c)) index = 0 while len(password) < 4096: password.append(ord(pw[index%len(pw)])) index", "256 | return[i] >= 1 \"\"\" seed = bytearray(256) for i in range(256):", "seed: bytearray = None): self.sBoxes: List[SBox] = [None]*8 if (seed is None): seed", "round: iteration of decode pSeed: seed for PBox Returns: block of decoded numbers", "self.spBox = SPBox(password) self.buffer = None self.seeded = False def decode(self, encoded: bytearray):", "2048 | len(self.decodeMap) == 2048 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 2048", "plain = bytearray() for i in range(256): plain.append(randint(0, 255)) for seed in range(256):", "i in range(2048): self.pw.append(randint(0, 255)) self.pBox = PBox(self.pw) def tearDown(self): self.pw = None", "numbers | **Pre:** | len(plain) == 256 | **Post:** | len(return) == 256", "self.spBox = SPBox(self.pw) def tearDown(self): self.pw = None self.spBox = None def test_simple(self):", "encoded = self.pBox.encode(plain, seed) decoded = self.pBox.decode(encoded, seed) decodedMatches = 0 encodedMatches =", "of plain numbers. 
Parameters: plain: block of plain numbers seed: seed Returns: block", "+= 2048 index8 = int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8)) return decoded class SPBox: \"\"\"", "lookuptable used to decode data Parameters: pw: password | **Pre:** | len(pw) ==", "decoded[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI return decoded def encode(self, plain: bytearray) -> bytearray:", "= i*8 for b in range(8): if ((encoded[i]) & (1<<b)): index = self.decodeMap[indexVar+b]-seed", "== 256 \"\"\" decoded = self.pBox.decode(encoded, pSeed) for i in range(256): seedAtI =", "encoded: bytearray) -> bytearray: \"\"\" Decodes a block of encoded numbers. Parameters: encoded:", ">= 0 | return[i] < 256 | **Modifies:** | self.seed[i] \"\"\" pSeed =", "= 0 while len(password) < 4096: password.append(ord(pw[index%len(pw)])) index += 1 self.spBox = SPBox(password)", "data decodeMap: lookuptable used to decode data Parameters: pw: password | **Pre:** |", "\"\"\" Gets the seed. Returns: block of seed numbers | **Post:** | len(return)", "seed: seed Returns: block of decoded numbers | **Pre:** | len(encoded) == 256", "8-1-invertedJ if ((seedAtI & (1<<j)) != 0): decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]] # replacement", "+= 1 for i in range(length): if (plain[i] == decoded[i]): decodedMatches += 1", "1 self.spBox = SPBox(password) self.buffer = None self.seeded = False def decode(self, encoded:", "decoded = self.decodeRound(encoded, 7, pSeed) for invertedI in range(7): i = 6-invertedI decoded", "len(self.decodeMap) == 2048 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 2048 \"\"\" def", "= self.encodeRound(encoded, i+1, pSeed) for i in range(256): self.seed[i] = plain[i] ^ self.seed[i]", "to decode data Parameters: pw: password | **Pre:** | len(pw) == 2048 |", "numbers | **Pre:** | len(encoded) == 256 | seed >= 0 | seed", "decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class PBoxUnitTest(unittest.TestCase): def 
setUp(self):", "return self.decodeMap[encoded] class PBox: \"\"\" PBox is a transposition cipher. Attributes: encodeMap: lookuptable", "len(encoded) >= 256: ba = bytearray() for i in range(256): ba.append(encoded.pop(0)) if (self.seeded):", "pSeed < 256 | **Post:** | len(return) == 256 \"\"\" decoded = self.pBox.decode(encoded,", "pw: password | **Pre:** | len(pw) == 256 | **Post:** | len(self.encodeMap) ==", "decoded = self.decodeRound(decoded, i, pSeed) for i in range(256): self.seed[i] = decoded[i] ^", "Parameters: seed: block of seed numbers | **Pre:** | len(seed) == 256 |", "= bytearray() for i in range(4096): self.pw.append(randint(0, 255)) self.spBox = SPBox(self.pw) def tearDown(self):", "| **Pre:** | plain >= 0 | plain < 256 | **Post:** |", "Decodes a block of encoded numbers. Parameters: encoded: block of encoded numbers Returns:", "i in range(256): plain.append(randint(0, 255)) for seed in range(256): encoded = self.pBox.encode(plain, seed)", "range(8): if ((plain[i]) & (1<<b)): index = self.encodeMap[(b+indexVar)%2048] index8 = int(index/8) encoded[index8] =", "int) -> bytearray: \"\"\" Decodes a block of encoded numbers. Parameters: encoded: block", "range(8): j = 8-1-invertedJ if ((seedAtI & (1<<j)) != 0): decoded[i] = self.sBoxes[j].decodeMap[", "bytearray() for c in pw: password.append(ord(c)) index = 0 while len(password) < 4096:", "< 256/10) self.assertTrue(decodedMatches == 256) class SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for", "| len(plain) == 256 | seed >= 0 | seed < 256 |", "bytearray: \"\"\" Gets the seed. 
Returns: block of seed numbers | **Post:** |", "(1<<j)) != 0): encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]] # replacement for SBox.encode() to improve", ">= 1 \"\"\" seed = bytearray(256) for i in range(256): seed[i] = self.seed[i]", "used to decode data Parameters: pw: password | **Pre:** | len(pw) == 2048", "None if (not self.seeded): ba = self.spBox.getSeed() returnvalue.extend(ba) self.seeded = True while len(plain)", "= None self.spBox = None def test_simple(self): plain = bytearray() for i in", "= self.spBox.decode(encoded) decodedMatches = 0 seedMatches = 0 for i in range(256): if", "256 | **Post:** | len(return) == 256 \"\"\" decoded = self.pBox.decode(encoded, pSeed) for", "| **Post:** | len(self.encodeMap) == 2048 | self.encodeMap[i] >= 0 | self.encodeMap[i] <", "maxEmpty = 256*8-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index] ==", ">= 256: ba = bytearray() for i in range(256): ba.append(encoded.pop(0)) if (self.seeded): decoded", "tearDown(self): self.pw = None self.pBox = None def test_simple(self): plain = bytearray() for", "j in range(8): if ((seedAtI & (1<<j)) != 0): encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]]", "range(256): self.pw.append(randint(0, 255)) self.sBox = SBox(self.pw) def tearDown(self): self.pw = None self.sBox =", "= 0 seedMatches = 0 for i in range(256): if (seed[i] == seed2[i]):", "bytearray() for i in range(4096): self.pw.append(randint(0, 255)) self.spBox = SPBox(self.pw) def tearDown(self): self.pw", "0 for i in range(256): plain = i encoded = self.sBox.encode(plain) decoded =", "number Returns: decoded number | **Pre:** | encoded >= 0 | encoded <", "to decode data Parameters: pw: password | **Pre:** | len(pw) == 256 |", "in range(7): encoded = self.encodeRound(encoded, i+1, pSeed) for i in range(256): self.seed[i] =", "functions, no deepcopy needed #TODO change to bytearray class SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw", "of SBoxes used for 
substitution seed: seed pBox: PBox used for permutation Parameters:", "\"\"\" PBox is a transposition cipher. Attributes: encodeMap: lookuptable used to encode data", "if ((plain[i]) & (1<<b)): index = self.encodeMap[(b+indexVar)%2048] index8 = int(index/8) encoded[index8] = encoded[index8]+(1<<(index%8))", "= seed[i] # TODO change general parameter policy: all parameters may be edited", "range(256): plain = i encoded = self.sBox.encode(plain) decoded = self.sBox.decode(encoded) if (plain ==", "in range(256): indexVar = i*8 for b in range(8): if ((encoded[i]) & (1<<b)):", "Returns: block of encoded numbers | **Pre:** | len(plain) == 256 | **Post:**", "targetEmpty): index = (index+1)%256 self.encodeMap[index] = i for i in range(256): self.decodeMap[self.encodeMap[i]] =", "= SBox(spw) ppw = bytearray(2048) for i in range(2048): ppw[i] = pw[8*256+i] self.pBox:", "of plain numbers round: iteration of encode pSeed: seed for PBox Returns: block", "self.seed[i] for invertedJ in range(8): j = 8-1-invertedJ if ((seedAtI & (1<<j)) !=", "= plain[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI for j in range(8): if ((seedAtI &", "seed[i] = self.seed[i] return seed def setSeed(self, seed: bytearray): \"\"\" Sets the seed.", "self.pBox.encode(plain, seed) decoded = self.pBox.decode(encoded, seed) decodedMatches = 0 encodedMatches = 0 for", "= self.sBox.encode(plain) decoded = self.sBox.decode(encoded) if (plain == encoded): encodedMatches += 1 if", "a block of plain numbers. 
Parameters: plain: block of plain numbers round: iteration", "for i in range(4096): self.pw.append(randint(0, 255)) self.spBox = SPBox(self.pw) def tearDown(self): self.pw =", "self.pBox.encode(encoded, pSeed) return encoded def decodeRound(self, encoded: bytearray, round: int, pSeed: int) ->", "0 | return[i] < 256 \"\"\" decoded = bytearray(256) for i in range(256):", "< 0): index += 2048 index8 = int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8)) return decoded", "returnvalue = bytearray() if self.buffer is not None: encoded = self.buffer+encoded self.buffer =", "seed for s in range(8): spw = bytearray(256) for i in range(256): spw[i]", "be edited by functions, no deepcopy needed #TODO change to bytearray class SBoxUnitTest(unittest.TestCase):", "== 0): self.seed[i] = 1 return encoded def decode(self, encoded: bytearray) -> bytearray:", "return encoded def decodeRound(self, encoded: bytearray, round: int, pSeed: int) -> bytearray: \"\"\"", "| seed[i] >= 1 | **Post:** | len(self.sBoxes) == 8 | len(self.seed) ==", "== 256 | encoded[i] >= 0 | encoded[i] < 256 | **Post:** |", "| self.decodeMap[i] < 2048 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*(256*8)", "def test_simple(self): decodedMatches = 0 encodedMatches = 0 for i in range(256): plain", "== 256 | return[i] >= 0 | return[i] < 256 \"\"\" decoded =", "1 \"\"\" seed = bytearray(256) for i in range(256): seed[i] = self.seed[i] return", "\"\"\" Sets the seed. Parameters: seed: block of seed numbers | **Pre:** |", "bytearray: \"\"\" Encodes a block of plain numbers. Parameters: plain: block of plain", "seed: bytearray): \"\"\" Sets the seed. 
Parameters: seed: block of seed numbers |", "i encoded = self.sBox.encode(plain) decoded = self.sBox.decode(encoded) if (plain == encoded): encodedMatches +=", "0 | encoded[i] < 256 | **Post:** | len(return) == 256 | return[i]", "performance decoded[i] = decoded[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI return decoded def encode(self, plain:", "while len(self.buffer) < 256: self.buffer.append(randint(0, 255)) return self.encode(bytearray()) class Decoder: def __init__(self, pw:", "encoded = self.pBox.encode(encoded, pSeed) return encoded def decodeRound(self, encoded: bytearray, round: int, pSeed:", "| **Post:** | len(return) == 256 \"\"\" encoded = bytearray(256) for i in", "ba = bytearray() for i in range(256): ba.append(encoded.pop(0)) if (self.seeded): decoded = self.spBox.decode(ba)", "__init__(self, pw: bytearray, seed: bytearray = None): self.sBoxes: List[SBox] = [None]*8 if (seed", "SBox.decode() to improve performance decoded[i] = decoded[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI return decoded", "256 | **Post:** | len(self.encodeMap) == 256 | self.encodeMap[i] >= 0 | self.encodeMap[i]", "number | **Pre:** | plain >= 0 | plain < 256 | **Post:**", "bytearray() for i in range(256): ba.append(plain.pop(0)) encoded = self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain) >", "def decode(self, encoded: bytearray): returnvalue = bytearray() if self.buffer is not None: encoded", "emptyCounter = 0 maxEmpty = 256*8-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty):", "= bytearray(2048) for i in range(2048): ppw[i] = pw[8*256+i] self.pBox: PBox = PBox(ppw)", "== seed2[i]): seedMatches += 1 for i in range(length): if (plain[i] == decoded[i]):", "seed[i] # TODO change general parameter policy: all parameters may be edited by", "len(plain) >= 256: ba = bytearray() for i in range(256): ba.append(plain.pop(0)) encoded =", "| return < 256 \"\"\" return self.encodeMap[plain] def decode(self, encoded: int) -> int:", "= 
self.sBox.decode(encoded) if (plain == encoded): encodedMatches += 1 if (plain == decoded):", "random import randint import unittest from typing import Dict, Tuple, List class Encoder:", "number. Parameters: plain: plain number Returns: encoded number | **Pre:** | plain >=", "len(password) < 4096: password.append(ord(pw[index%len(pw)])) index += 1 self.spBox = SPBox(password) self.buffer = None", "range(256): indexVar = i*8 for b in range(8): if ((encoded[i]) & (1<<b)): index", "seed < 256 | **Post:** | len(return) == 256 | return[i] >= 0", "the seed. Parameters: seed: block of seed numbers | **Pre:** | len(seed) ==", "is not None: plain = self.buffer+plain self.buffer = None if (not self.seeded): ba", "__init__(self, pw: str): password = bytearray() for c in pw: password.append(ord(c)) index =", "= bytearray() for i in range(256): ba.append(plain.pop(0)) encoded = self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain)", "ba.append(plain.pop(0)) encoded = self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain) > 0: self.buffer = plain return", "block of encoded numbers. 
Parameters: encoded: block of encoded numbers Returns: block of", "< 256 | **Post:** | len(return) == 256 \"\"\" decoded = self.pBox.decode(encoded, pSeed)", "int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8)) return decoded class SPBox: \"\"\" SPBox is a substitution-permutation", "**Post:** | len(return) == 256 | return[i] >= 0 | return[i] < 256", "if (emptyCounter < targetEmpty): index = (index+1)%256 self.encodeMap[index] = i for i in", "= self.decodeRound(decoded, i, pSeed) for i in range(256): self.seed[i] = decoded[i] ^ self.seed[i]", "0 | round < 8 | pSeed >= 0 | pSeed < 256", "block of encoded numbers seed: seed Returns: block of decoded numbers | **Pre:**", "i*8+seed for b in range(8): if ((plain[i]) & (1<<b)): index = self.encodeMap[(b+indexVar)%2048] index8", "self.buffer = None if (not self.seeded): ba = self.spBox.getSeed() returnvalue.extend(ba) self.seeded = True", "self.seed[i] >= 1 \"\"\" def __init__(self, pw: bytearray, seed: bytearray = None): self.sBoxes:", "i in range(256*8): emptyCounter = 0 maxEmpty = 256*8-i targetEmpty = 1+(pw[i]%maxEmpty) while", "(self.encodeMap[index] == -1): emptyCounter += 1 if (emptyCounter < targetEmpty): index = (index+1)%256", "needed #TODO change to bytearray class SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for", "256 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*256 self.decodeMap: List[int] =", "4096: password.append(ord(pw[index%len(pw)])) index += 1 self.spBox = SPBox(password) self.buffer = None self.seeded =", "def test_simple(self): plain = bytearray() for i in range(256): plain.append(randint(0, 255)) for seed", "seedAtI return decoded def encode(self, plain: bytearray) -> bytearray: \"\"\" Encodes a block", "256 | return[i] >= 0 | return[i] < 256 \"\"\" encoded = bytearray(256)", "plain: bytearray) -> bytearray: \"\"\" Encodes a block of plain numbers. 
Parameters: plain:", "PBox(ppw) def encodeRound(self, plain: bytearray, round: int, pSeed: int) -> bytearray: \"\"\" Encodes", "bytearray(256) for i in range(256): indexVar = i*8+seed for b in range(8): if", "**Pre:** | len(seed) == 256 | seed[i] >= 1 | **Modifies:** | self.seed[i]", "encoded number. Parameters: encoded: encoded number Returns: decoded number | **Pre:** | encoded", "self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return encoded def decode(self, encoded:", "[-1]*(256*8) index = 0 for i in range(256*8): emptyCounter = 0 maxEmpty =", "index = 0 for i in range(256*8): emptyCounter = 0 maxEmpty = 256*8-i", "self.encodeRound(plain, 0, pSeed) for i in range(7): encoded = self.encodeRound(encoded, i+1, pSeed) for", "256/10) self.assertTrue(decodedMatches == 256) class SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i", "= self.spBox.getSeed() returnvalue.extend(ba) self.seeded = True while len(plain) >= 256: ba = bytearray()", "**Modifies:** | self.seed[i] \"\"\" for i in range(256): self.seed[i] = seed[i] # TODO", "plain return returnvalue def close(self): while len(self.buffer) < 256: self.buffer.append(randint(0, 255)) return self.encode(bytearray())", "seed | **Pre:** | len(pw) == 4096 | len(seed) == 256 | seed[i]", "!= 0) encoded = self.spBox.encode(plain) for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) seed2", "**Post:** | len(return) == 256 | return[i] >= 1 \"\"\" seed = bytearray(256)", "| len(self.encodeMap) == 256 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 256 |", "in range(8): j = 8-1-invertedJ if ((seedAtI & (1<<j)) != 0): decoded[i] =", "-> int: \"\"\" Encodes a single plain number. 
Parameters: plain: plain number Returns:", "while (emptyCounter < targetEmpty): if (self.encodeMap[index] == -1): emptyCounter += 1 if (emptyCounter", "encoded = self.spBox.encode(plain) for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) seed2 = self.spBox.getSeed()", "\"\"\" Encodes a single plain number. Parameters: plain: plain number Returns: encoded number", "encode(self, plain: bytearray, seed: int) -> bytearray: \"\"\" Encodes a block of plain", "0 for i in range(256*8): emptyCounter = 0 maxEmpty = 256*8-i targetEmpty =", "bytearray, seed: bytearray = None): self.sBoxes: List[SBox] = [None]*8 if (seed is None):", "deepcopy needed #TODO change to bytearray class SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray()", "in range(256): emptyCounter = 0 maxEmpty = 256-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter", "self.seed[i] = decoded[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return", "seed = self.spBox.getSeed() for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) encoded = self.spBox.encode(plain)", "= True if len(encoded) > 0: self.buffer = encoded return returnvalue def close(self):", "| **Pre:** | len(seed) == 256 | seed[i] >= 1 | **Modifies:** |", "encode(self, plain: bytearray): returnvalue = bytearray() if self.buffer is not None: plain =", "plain numbers seed: seed Returns: block of encoded numbers | **Pre:** | len(plain)", "bytearray = None): self.sBoxes: List[SBox] = [None]*8 if (seed is None): seed =", "plain >= 0 | plain < 256 | **Post:** | return >= 0", "close(self): return bytearray() class SBox: \"\"\" SBox is a substitution cipher. 
Attributes: encodeMap:", "range(8): if ((seedAtI & (1<<j)) != 0): encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]] # replacement", "range(256): if (plain[i] == encoded[i]): encodedMatches += 1 if (plain[i] == decoded[i]): decodedMatches", "block of decoded numbers | **Pre:** | len(encoded) == 256 | seed >=", "bytearray() for i in range(256): self.pw.append(randint(0, 255)) self.sBox = SBox(self.pw) def tearDown(self): self.pw", "pSeed = 0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded,", "None def test_simple(self): plain = bytearray() for i in range(256): plain.append(randint(0, 255)) length", "pBox: PBox used for permutation Parameters: pw: password seed: seed | **Pre:** |", "SBox is a substitution cipher. Attributes: encodeMap: lookuptable used to encode data decodeMap:", "0): encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]] # replacement for SBox.encode() to improve performance encoded", "in range(2048): ppw[i] = pw[8*256+i] self.pBox: PBox = PBox(ppw) def encodeRound(self, plain: bytearray,", "seed in range(256): encoded = self.pBox.encode(plain, seed) decoded = self.pBox.decode(encoded, seed) decodedMatches =", "SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(4096): self.pw.append(randint(0, 255)) self.spBox", "int, pSeed: int) -> bytearray: \"\"\" Decodes a block of encoded numbers. 
Parameters:", "| **Pre:** | len(plain) == 256 | **Post:** | len(return) == 256 |", "255)) length = len(plain) seed = self.spBox.getSeed() for i in range(256): self.assertTrue(self.spBox.seed[i] !=", "self.encodeMap: List[int] = [-1]*256 self.decodeMap: List[int] = [-1]*256 index = 0 for i", "bytearray() for i in range(256): plain.append(randint(0, 255)) for seed in range(256): encoded =", "= 0 for i in range(256*8): emptyCounter = 0 maxEmpty = 256*8-i targetEmpty", "== 2048 | **Post:** | len(self.encodeMap) == 2048 | self.encodeMap[i] >= 0 |", "bytearray, round: int, pSeed: int) -> bytearray: \"\"\" Encodes a block of plain", "bytearray, seed: int) -> bytearray: \"\"\" Encodes a block of plain numbers. Parameters:", "plain: block of plain numbers round: iteration of encode pSeed: seed for PBox", "round: int, pSeed: int) -> bytearray: \"\"\" Encodes a block of plain numbers.", "for PBox Returns: block of decoded numbers | **Pre:** | len(encoded) == 256", "== 256) class PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(2048):", "block of plain numbers. Parameters: plain: block of plain numbers Returns: block of", "Encodes a single plain number. Parameters: plain: plain number Returns: encoded number |", "| len(encoded) == 256 | encoded[i] >= 0 | encoded[i] < 256 |", "decoded[i]): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class SPBoxUnitTest(unittest.TestCase): def", "of encoded numbers. 
Parameters: encoded: block of encoded numbers round: iteration of decode", "self.seed[i] encoded[i] = plain[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI for j in range(8): if", "((plain[i]) & (1<<b)): index = self.encodeMap[(b+indexVar)%2048] index8 = int(index/8) encoded[index8] = encoded[index8]+(1<<(index%8)) return", "True while len(plain) >= 256: ba = bytearray() for i in range(256): ba.append(plain.pop(0))", "return decoded class SPBox: \"\"\" SPBox is a substitution-permutation network. Attributes: sBoxes: list", "of seed numbers | **Post:** | len(return) == 256 | return[i] >= 1", "encoded: block of encoded numbers round: iteration of decode pSeed: seed for PBox", "= 0 for i in range(256): plain = i encoded = self.sBox.encode(plain) decoded", "Decodes a block of encoded numbers. Parameters: encoded: block of encoded numbers seed:", "**Pre:** | len(encoded) == 256 | seed >= 0 | seed < 256", "SPBox: \"\"\" SPBox is a substitution-permutation network. Attributes: sBoxes: list of SBoxes used", "= decoded[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1 return decoded", "256/10) self.assertTrue(decodedMatches == 256) class PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i", "0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded, 7, pSeed)", "number | **Pre:** | encoded >= 0 | encoded < 256 | **Post:**", "i in range(256): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: int) -> int: \"\"\"", "ba.append(encoded.pop(0)) if (self.seeded): decoded = self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded = True if", "int, pSeed: int) -> bytearray: \"\"\" Encodes a block of plain numbers. Parameters:", "a block of plain numbers. 
Parameters: plain: block of plain numbers seed: seed", "if ((seedAtI & (1<<j)) != 0): decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]] # replacement for", "pSeed = 0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 encoded = self.encodeRound(plain,", "decoded = self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded = True if len(encoded) > 0:", ">= 0 | self.encodeMap[i] < 2048 | len(self.decodeMap) == 2048 | self.decodeMap[i] >=", "Encodes a block of plain numbers. Parameters: plain: block of plain numbers seed:", "numbers | **Pre:** | len(encoded) == 256 | round >= 0 | round", "indexVar = i*8 for b in range(8): if ((encoded[i]) & (1<<b)): index =", "i in range(2048): ppw[i] = pw[8*256+i] self.pBox: PBox = PBox(ppw) def encodeRound(self, plain:", "0) encoded = self.spBox.encode(plain) for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) seed2 =", "1 | **Modifies:** | self.seed[i] \"\"\" for i in range(256): self.seed[i] = seed[i]", "len(plain) > 0: self.buffer = plain return returnvalue def close(self): while len(self.buffer) <", "self.seeded = False def decode(self, encoded: bytearray): returnvalue = bytearray() if self.buffer is", "| pSeed < 256 | **Post:** | len(return) == 256 \"\"\" decoded =", "return returnvalue def close(self): return bytearray() class SBox: \"\"\" SBox is a substitution", "in range(256): ba.append(plain.pop(0)) encoded = self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain) > 0: self.buffer =", "**Pre:** | len(plain) == 256 | seed >= 0 | seed < 256", "0 | seed < 256 | **Post:** | len(return) == 256 | return[i]", "256 \"\"\" decoded = bytearray(256) for i in range(256): indexVar = i*8 for", "test_simple(self): plain = bytearray() for i in range(256): plain.append(randint(0, 255)) for seed in", "len(encoded) == 256 | encoded[i] >= 0 | encoded[i] < 256 | **Post:**", "0 while len(password) < 4096: password.append(ord(pw[index%len(pw)])) index += 1 self.spBox = SPBox(password) 
self.buffer", "| len(pw) == 256 | **Post:** | len(self.encodeMap) == 256 | self.encodeMap[i] >=", "(self.seeded): decoded = self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded = True if len(encoded) >", "256: ba = bytearray() for i in range(256): ba.append(plain.pop(0)) encoded = self.spBox.encode(ba) returnvalue.extend(encoded)", "i in range(256): seed[i] = self.seed[i] return seed def setSeed(self, seed: bytearray): \"\"\"", "< 256 | len(self.decodeMap) == 256 | self.decodeMap[i] >= 0 | self.decodeMap[i] <", "i in range(256): self.pw.append(randint(0, 255)) self.sBox = SBox(self.pw) def tearDown(self): self.pw = None", "plain: bytearray): returnvalue = bytearray() if self.buffer is not None: plain = self.buffer+plain", "SBoxes used for substitution seed: seed pBox: PBox used for permutation Parameters: pw:", "password | **Pre:** | len(pw) == 256 | **Post:** | len(self.encodeMap) == 256", "SPBox(self.pw) def tearDown(self): self.pw = None self.spBox = None def test_simple(self): plain =", "seed[i] >= 1 | **Modifies:** | self.seed[i] \"\"\" for i in range(256): self.seed[i]", "= None if (not self.seeded): ba = self.spBox.getSeed() returnvalue.extend(ba) self.seeded = True while", "None: encoded = self.buffer+encoded self.buffer = None while len(encoded) >= 256: ba =", "for i in range(256): indexVar = i*8+seed for b in range(8): if ((plain[i])", "self.buffer+encoded self.buffer = None while len(encoded) >= 256: ba = bytearray() for i", "in range(8): if ((plain[i]) & (1<<b)): index = self.encodeMap[(b+indexVar)%2048] index8 = int(index/8) encoded[index8]", "i, pSeed) for i in range(256): self.seed[i] = decoded[i] ^ self.seed[i] if (self.seed[i]", "= self.encodeMap[(b+indexVar)%2048] index8 = int(index/8) encoded[index8] = encoded[index8]+(1<<(index%8)) return encoded def decode(self, encoded:", "round >= 0 | round < 8 | pSeed >= 0 | pSeed", "to improve performance encoded = self.pBox.encode(encoded, pSeed) return encoded def 
decodeRound(self, encoded: bytearray,", "if (seed is None): seed = bytearray(256) for i in range(256): seed[i] =", "self.sBox = None def test_simple(self): decodedMatches = 0 encodedMatches = 0 for i", "class SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(256): self.pw.append(randint(0, 255))", "= bytearray(256) for i in range(256): indexVar = i*8+seed for b in range(8):", "Returns: block of encoded numbers | **Pre:** | len(plain) == 256 | seed", "| seed < 256 | **Post:** | len(return) == 256 | return[i] >=", "encoded[i] >= 0 | encoded[i] < 256 | **Post:** | len(return) == 256", "0 for i in range(256): if (plain[i] == encoded[i]): encodedMatches += 1 if", "self.encodeMap[i] >= 0 | self.encodeMap[i] < 256 | len(self.decodeMap) == 256 | self.decodeMap[i]", "1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw =", "256 \"\"\" return self.encodeMap[plain] def decode(self, encoded: int) -> int: \"\"\" Decodes a", "numbers | **Pre:** | len(plain) == 256 | round >= 0 | round", "i in range(256): pSeed = (pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded, 7, pSeed) for invertedI", "return[i] >= 1 \"\"\" seed = bytearray(256) for i in range(256): seed[i] =", "2048 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 2048 \"\"\" def __init__(self, pw:", "in pw: password.append(ord(c)) index = 0 while len(password) < 4096: password.append(ord(pw[index%len(pw)])) index +=", "of encoded numbers | **Pre:** | len(plain) == 256 | round >= 0", "pSeed) for i in range(7): encoded = self.encodeRound(encoded, i+1, pSeed) for i in", "| return[i] >= 1 \"\"\" seed = bytearray(256) for i in range(256): seed[i]", "pSeed) return encoded def decodeRound(self, encoded: bytearray, round: int, pSeed: int) -> bytearray:", "decoded): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class 
PBoxUnitTest(unittest.TestCase): def", "self.seed[i] = seed[i] # TODO change general parameter policy: all parameters may be", "self.decodeMap[i] < 2048 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*(256*8) self.decodeMap:", "self.pBox = PBox(self.pw) def tearDown(self): self.pw = None self.pBox = None def test_simple(self):", "(1<<j)) != 0): decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]] # replacement for SBox.decode() to improve", "= 256-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index] == -1):", "**Pre:** | len(plain) == 256 | **Post:** | len(return) == 256 | **Modifies:**", "< targetEmpty): index = (index+1)%(256*8) self.encodeMap[index] = i for i in range(256*8): self.decodeMap[self.encodeMap[i]]", "def close(self): while len(self.buffer) < 256: self.buffer.append(randint(0, 255)) return self.encode(bytearray()) class Decoder: def", "close(self): while len(self.buffer) < 256: self.buffer.append(randint(0, 255)) return self.encode(bytearray()) class Decoder: def __init__(self,", "== 256 | **Post:** | len(return) == 256 | **Modifies:** | self.seed[i] \"\"\"", "in range(256): self.assertTrue(self.spBox.seed[i] != 0) seed2 = self.spBox.getSeed() self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded) decodedMatches", "encode(self, plain: int) -> int: \"\"\" Encodes a single plain number. 
Parameters: plain:", ">= 0 | plain < 256 | **Post:** | return >= 0 |", "| self.encodeMap[i] < 256 | len(self.decodeMap) == 256 | self.decodeMap[i] >= 0 |", "is not None: encoded = self.buffer+encoded self.buffer = None while len(encoded) >= 256:", "SBox.encode() to improve performance encoded = self.pBox.encode(encoded, pSeed) return encoded def decodeRound(self, encoded:", "1 if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches ==", "0 encodedMatches = 0 for i in range(256): if (plain[i] == encoded[i]): encodedMatches", "for seed in range(256): encoded = self.pBox.encode(plain, seed) decoded = self.pBox.decode(encoded, seed) decodedMatches", "def close(self): return bytearray() class SBox: \"\"\" SBox is a substitution cipher. Attributes:", "256 | round >= 0 | round < 8 | pSeed >= 0", "for i in range(256): seedAtI = self.seed[i] for invertedJ in range(8): j =", "self.sBoxes: List[SBox] = [None]*8 if (seed is None): seed = bytearray(256) for i", "a substitution cipher. Attributes: encodeMap: lookuptable used to encode data decodeMap: lookuptable used", "return bytearray() class SBox: \"\"\" SBox is a substitution cipher. Attributes: encodeMap: lookuptable", "= 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index] == -1): emptyCounter += 1", "| **Post:** | return >= 0 | return < 256 \"\"\" return self.decodeMap[encoded]", "tearDown(self): self.pw = None self.spBox = None def test_simple(self): plain = bytearray() for", "2048 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*(256*8) self.decodeMap: List[int] =", "seed: int) -> List[int]: \"\"\" Decodes a block of encoded numbers. 
Parameters: encoded:", "length) # TODO encodeMatches self.assertTrue(seedMatches < 256/10) # TODO encode 2nd batch#plain is", "range(256): if (seed[i] == seed2[i]): seedMatches += 1 for i in range(length): if", "decoded[index8]+(1<<(index%8)) return decoded class SPBox: \"\"\" SPBox is a substitution-permutation network. Attributes: sBoxes:", "encoded: block of encoded numbers Returns: block of decoded numbers | **Pre:** |", "< 256 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*256 self.decodeMap: List[int]", "block of plain numbers. Parameters: plain: block of plain numbers round: iteration of", "Decoder: def __init__(self, pw: str): password = bytearray() for c in pw: password.append(ord(c))", "[-1]*(256*8) self.decodeMap: List[int] = [-1]*(256*8) index = 0 for i in range(256*8): emptyCounter", "| **Pre:** | len(plain) == 256 | round >= 0 | round <", "password seed: seed | **Pre:** | len(pw) == 4096 | len(seed) == 256", "in range(256): seedAtI = self.seed[i] encoded[i] = plain[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI for", "decoded def encode(self, plain: bytearray) -> bytearray: \"\"\" Encodes a block of plain", "self.seed[i] \"\"\" pSeed = 0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 decoded", "| seed >= 0 | seed < 256 | **Post:** | len(return) ==", "is None): seed = bytearray(256) for i in range(256): seed[i] = randint(1, 255)", "list of SBoxes used for substitution seed: seed pBox: PBox used for permutation", "| self.decodeMap[i] < 256 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*256", "in range(256): spw[i] = pw[s*256+i] self.sBoxes[s] = SBox(spw) ppw = bytearray(2048) for i", "i in range(256): emptyCounter = 0 maxEmpty = 256-i targetEmpty = 1+(pw[i]%maxEmpty) while", "index = self.decodeMap[indexVar+b]-seed if (index < 0): index += 2048 index8 = int(index/8)", "Returns: block of decoded numbers | **Pre:** | len(encoded) == 256 | seed", "= self.pBox.encode(plain, seed) decoded = 
self.pBox.decode(encoded, seed) decodedMatches = 0 encodedMatches = 0", "spw = bytearray(256) for i in range(256): spw[i] = pw[s*256+i] self.sBoxes[s] = SBox(spw)", "+= 1 if (plain == decoded): decodedMatches += 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches", "if (self.seeded): decoded = self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded = True if len(encoded)", "= self.pBox.encode(encoded, pSeed) return encoded def decodeRound(self, encoded: bytearray, round: int, pSeed: int)", "seed: seed Returns: block of encoded numbers | **Pre:** | len(plain) == 256", "SPBox(password) self.buffer = None self.seeded = False def encode(self, plain: bytearray): returnvalue =", "= [-1]*256 self.decodeMap: List[int] = [-1]*256 index = 0 for i in range(256):", "& (1<<b)): index = self.encodeMap[(b+indexVar)%2048] index8 = int(index/8) encoded[index8] = encoded[index8]+(1<<(index%8)) return encoded", "seed. Parameters: seed: block of seed numbers | **Pre:** | len(seed) == 256", "return self.encodeMap[plain] def decode(self, encoded: int) -> int: \"\"\" Decodes a single encoded", "| len(self.decodeMap) == 2048 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 2048 \"\"\"", "| round >= 0 | round < 8 | pSeed >= 0 |", "substitution seed: seed pBox: PBox used for permutation Parameters: pw: password seed: seed", "iteration of decode pSeed: seed for PBox Returns: block of decoded numbers |", "range(7): i = 6-invertedI decoded = self.decodeRound(decoded, i, pSeed) for i in range(256):", "block of encoded numbers. 
Parameters: encoded: block of encoded numbers seed: seed Returns:", "if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(decodedMatches == length) # TODO encodeMatches", "**Post:** | len(return) == 256 \"\"\" decoded = self.pBox.decode(encoded, pSeed) for i in", "self.decodeMap[i] >= 0 | self.decodeMap[i] < 2048 \"\"\" def __init__(self, pw: bytearray): self.encodeMap:", "password.append(ord(pw[index%len(pw)])) index += 1 self.spBox = SPBox(password) self.buffer = None self.seeded = False", "0 | self.decodeMap[i] < 256 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] =", "decodeMap: lookuptable used to decode data Parameters: pw: password | **Pre:** | len(pw)", "of encoded numbers Returns: block of decoded numbers | **Pre:** | len(encoded) ==", "password.append(ord(c)) index = 0 while len(password) < 4096: password.append(ord(pw[index%len(pw)])) index += 1 self.spBox", "encodeMap: lookuptable used to encode data decodeMap: lookuptable used to decode data Parameters:", "| len(pw) == 2048 | **Post:** | len(self.encodeMap) == 2048 | self.encodeMap[i] >=", "test_simple(self): decodedMatches = 0 encodedMatches = 0 for i in range(256): plain =", "256 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 256 | len(self.decodeMap) == 256", "List[int] = [-1]*(256*8) index = 0 for i in range(256*8): emptyCounter = 0", "| self.seed[i] \"\"\" pSeed = 0 for i in range(256): pSeed = (pSeed+self.seed[i])%256", "seed) decoded = self.pBox.decode(encoded, seed) decodedMatches = 0 encodedMatches = 0 for i", "0 | plain < 256 | **Post:** | return >= 0 | return", "SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(256): self.pw.append(randint(0, 255)) self.sBox", "plain < 256 | **Post:** | return >= 0 | return < 256", "encoded numbers seed: seed Returns: block of decoded numbers | **Pre:** | len(encoded)", "pSeed < 256 | **Post:** | len(return) == 256 \"\"\" encoded = bytearray(256)", "transposition cipher. 
Attributes: encodeMap: lookuptable used to encode data decodeMap: lookuptable used to", "i in range(256): self.assertTrue(self.spBox.seed[i] != 0) encoded = self.spBox.encode(plain) for i in range(256):", "int) -> List[int]: \"\"\" Decodes a block of encoded numbers. Parameters: encoded: block", "= bytearray(256) for i in range(256): seed[i] = randint(1, 255) self.seed: bytearray =", "= None self.pBox = None def test_simple(self): plain = bytearray() for i in", "for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) encoded = self.spBox.encode(plain) for i in", "pSeed = (pSeed+self.seed[i])%256 encoded = self.encodeRound(plain, 0, pSeed) for i in range(7): encoded", "plain numbers. Parameters: plain: block of plain numbers Returns: block of encoded numbers", "parameter policy: all parameters may be edited by functions, no deepcopy needed #TODO", "while len(plain) >= 256: ba = bytearray() for i in range(256): ba.append(plain.pop(0)) encoded", "seed = bytearray(256) for i in range(256): seed[i] = randint(1, 255) self.seed: bytearray", "< 256 \"\"\" decoded = bytearray(256) for i in range(256): indexVar = i*8", "8 | pSeed >= 0 | pSeed < 256 | **Post:** | len(return)", "(pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded, 7, pSeed) for invertedI in range(7): i = 6-invertedI", "= 0 for i in range(256): if (seed[i] == seed2[i]): seedMatches += 1", "i in range(256): self.seed[i] = seed[i] # TODO change general parameter policy: all", "Parameters: plain: plain number Returns: encoded number | **Pre:** | plain >= 0", "seed. 
Returns: block of seed numbers | **Post:** | len(return) == 256 |", "self.decodeMap[i] >= 0 | self.decodeMap[i] < 256 \"\"\" def __init__(self, pw: bytearray): self.encodeMap:", "len(self.decodeMap) == 256 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 256 \"\"\" def", "if (self.seed[i] == 0): self.seed[i] = 1 return decoded def getSeed(self) -> bytearray:", "plain: block of plain numbers seed: seed Returns: block of encoded numbers |", "not None: plain = self.buffer+plain self.buffer = None if (not self.seeded): ba =", "seedAtI for j in range(8): if ((seedAtI & (1<<j)) != 0): encoded[i] =", "= 0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 encoded = self.encodeRound(plain, 0,", "self.seed[i] \"\"\" pSeed = 0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 encoded", "block of seed numbers | **Post:** | len(return) == 256 | return[i] >=", "+= 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw", "(emptyCounter < targetEmpty): index = (index+1)%256 self.encodeMap[index] = i for i in range(256):", "len(return) == 256 \"\"\" encoded = bytearray(256) for i in range(256): seedAtI =", "replacement for SBox.decode() to improve performance decoded[i] = decoded[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI", "self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain) > 0: self.buffer = plain return returnvalue def close(self):", "seedAtI = self.seed[i] encoded[i] = plain[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI for j in", "TODO change general parameter policy: all parameters may be edited by functions, no", "targetEmpty): if (self.encodeMap[index] == -1): emptyCounter += 1 if (emptyCounter < targetEmpty): index", "= bytearray() for i in range(256): plain.append(randint(0, 255)) for seed in range(256): encoded", "0 | pSeed < 256 | **Post:** | len(return) == 256 \"\"\" encoded", "self.pw.append(randint(0, 255)) self.sBox = SBox(self.pw) def tearDown(self): 
self.pw = None self.sBox = None", "range(256): seed[i] = self.seed[i] return seed def setSeed(self, seed: bytearray): \"\"\" Sets the", "= seed for s in range(8): spw = bytearray(256) for i in range(256):", "return[i] >= 0 | return[i] < 256 \"\"\" decoded = bytearray(256) for i", "| len(pw) == 4096 | len(seed) == 256 | seed[i] >= 1 |", "encoded numbers | **Pre:** | len(plain) == 256 | **Post:** | len(return) ==", "Parameters: plain: block of plain numbers Returns: block of encoded numbers | **Pre:**", "seedAtI = self.seed[i] for invertedJ in range(8): j = 8-1-invertedJ if ((seedAtI &", "== encoded[i]): encodedMatches += 1 if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(encodedMatches", "for i in range(256): plain = i encoded = self.sBox.encode(plain) decoded = self.sBox.decode(encoded)", "bytearray() for i in range(256): ba.append(encoded.pop(0)) if (self.seeded): decoded = self.spBox.decode(ba) returnvalue.extend(decoded) else:", "emptyCounter = 0 maxEmpty = 256-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty):", "self.buffer+plain self.buffer = None if (not self.seeded): ba = self.spBox.getSeed() returnvalue.extend(ba) self.seeded =", "< 256/10) self.assertTrue(decodedMatches == 256) class PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for", "unittest from typing import Dict, Tuple, List class Encoder: def __init__(self, pw: str):", "b in range(8): if ((plain[i]) & (1<<b)): index = self.encodeMap[(b+indexVar)%2048] index8 = int(index/8)", "of encoded numbers | **Pre:** | len(plain) == 256 | seed >= 0", "in range(256): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: int) -> int: \"\"\" Encodes", "(1<<b)): index = self.decodeMap[indexVar+b]-seed if (index < 0): index += 2048 index8 =", "i in range(length): if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(decodedMatches == length)", "seed[i] = randint(1, 255) self.seed: bytearray = seed for s in range(8): spw", "List[int] = 
[-1]*(256*8) self.decodeMap: List[int] = [-1]*(256*8) index = 0 for i in", "plain[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI for j in range(8): if ((seedAtI & (1<<j))", "encoded[index8] = encoded[index8]+(1<<(index%8)) return encoded def decode(self, encoded: bytearray, seed: int) -> List[int]:", "of encoded numbers | **Pre:** | len(plain) == 256 | **Post:** | len(return)", "encoded[i] = plain[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI for j in range(8): if ((seedAtI", "**Modifies:** | self.seed[i] \"\"\" pSeed = 0 for i in range(256): pSeed =", "randint(1, 255) self.seed: bytearray = seed for s in range(8): spw = bytearray(256)", "self.buffer = None self.seeded = False def encode(self, plain: bytearray): returnvalue = bytearray()", "= randint(1, 255) self.seed: bytearray = seed for s in range(8): spw =", "encoded def decode(self, encoded: bytearray, seed: int) -> List[int]: \"\"\" Decodes a block", "self.pw = bytearray() for i in range(256): self.pw.append(randint(0, 255)) self.sBox = SBox(self.pw) def", "if (not self.seeded): ba = self.spBox.getSeed() returnvalue.extend(ba) self.seeded = True while len(plain) >=", "else: self.spBox.setSeed(ba) self.seeded = True if len(encoded) > 0: self.buffer = encoded return", "= 0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded, 7,", "= i for i in range(256*8): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: bytearray,", "range(256): self.assertTrue(self.spBox.seed[i] != 0) seed2 = self.spBox.getSeed() self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded) decodedMatches =", "encoded def decodeRound(self, encoded: bytearray, round: int, pSeed: int) -> bytearray: \"\"\" Decodes", "Encodes a block of plain numbers. 
Parameters: plain: block of plain numbers Returns:", "**Pre:** | encoded >= 0 | encoded < 256 | **Post:** | return", "encoded: bytearray, seed: int) -> List[int]: \"\"\" Decodes a block of encoded numbers.", "= bytearray(256) for i in range(256): indexVar = i*8 for b in range(8):", "len(plain) == 256 | seed >= 0 | seed < 256 | **Post:**", "in range(256): indexVar = i*8+seed for b in range(8): if ((plain[i]) & (1<<b)):", "for i in range(2048): self.pw.append(randint(0, 255)) self.pBox = PBox(self.pw) def tearDown(self): self.pw =", "password | **Pre:** | len(pw) == 2048 | **Post:** | len(self.encodeMap) == 2048", "i in range(256): seed[i] = randint(1, 255) self.seed: bytearray = seed for s", "| **Pre:** | len(encoded) == 256 | seed >= 0 | seed <", "self.pw = None self.sBox = None def test_simple(self): decodedMatches = 0 encodedMatches =", "= self.buffer+encoded self.buffer = None while len(encoded) >= 256: ba = bytearray() for", "self.seed[i] = 1 return encoded def decode(self, encoded: bytearray) -> bytearray: \"\"\" Decodes", "def encode(self, plain: int) -> int: \"\"\" Encodes a single plain number. 
Parameters:", "in range(256*8): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: bytearray, seed: int) -> bytearray:", "256 \"\"\" encoded = bytearray(256) for i in range(256): indexVar = i*8+seed for", "j = 8-1-invertedJ if ((seedAtI & (1<<j)) != 0): decoded[i] = self.sBoxes[j].decodeMap[ decoded[i]]", "range(4096): self.pw.append(randint(0, 255)) self.spBox = SPBox(self.pw) def tearDown(self): self.pw = None self.spBox =", "import randint import unittest from typing import Dict, Tuple, List class Encoder: def", "**Pre:** | plain >= 0 | plain < 256 | **Post:** | return", "| **Pre:** | len(pw) == 2048 | **Post:** | len(self.encodeMap) == 2048 |", "self.seeded = True while len(plain) >= 256: ba = bytearray() for i in", "< 256 \"\"\" return self.encodeMap[plain] def decode(self, encoded: int) -> int: \"\"\" Decodes", "= bytearray(256) for i in range(256): spw[i] = pw[s*256+i] self.sBoxes[s] = SBox(spw) ppw", "if ((encoded[i]) & (1<<b)): index = self.decodeMap[indexVar+b]-seed if (index < 0): index +=", "not None: encoded = self.buffer+encoded self.buffer = None while len(encoded) >= 256: ba", "== -1): emptyCounter += 1 if (emptyCounter < targetEmpty): index = (index+1)%(256*8) self.encodeMap[index]", "# replacement for SBox.encode() to improve performance encoded = self.pBox.encode(encoded, pSeed) return encoded", "returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded = True if len(encoded) > 0: self.buffer = encoded", "emptyCounter += 1 if (emptyCounter < targetEmpty): index = (index+1)%(256*8) self.encodeMap[index] = i", "= (index+1)%(256*8) self.encodeMap[index] = i for i in range(256*8): self.decodeMap[self.encodeMap[i]] = i def", "= bytearray() for i in range(256): self.pw.append(randint(0, 255)) self.sBox = SBox(self.pw) def tearDown(self):", "+= 1 self.assertTrue(encodedMatches < 256/10) self.assertTrue(decodedMatches == 256) class SPBoxUnitTest(unittest.TestCase): def setUp(self): self.pw", "encoded: bytearray, round: int, pSeed: 
int) -> bytearray: \"\"\" Decodes a block of", "i in range(7): encoded = self.encodeRound(encoded, i+1, pSeed) for i in range(256): self.seed[i]", "Parameters: plain: block of plain numbers round: iteration of encode pSeed: seed for", "| return[i] < 256 | **Modifies:** | self.seed[i] \"\"\" pSeed = 0 for", "| round < 8 | pSeed >= 0 | pSeed < 256 |", "index = (index+1)%256 self.encodeMap[index] = i for i in range(256): self.decodeMap[self.encodeMap[i]] = i", "returnvalue.extend(encoded) if len(plain) > 0: self.buffer = plain return returnvalue def close(self): while", "len(pw) == 2048 | **Post:** | len(self.encodeMap) == 2048 | self.encodeMap[i] >= 0", "for i in range(256): plain.append(randint(0, 255)) for seed in range(256): encoded = self.pBox.encode(plain,", "seed: block of seed numbers | **Pre:** | len(seed) == 256 | seed[i]", "to encode data decodeMap: lookuptable used to decode data Parameters: pw: password |", "for i in range(256): self.seed[i] = plain[i] ^ self.seed[i] if (self.seed[i] == 0):", "255)) return self.encode(bytearray()) class Decoder: def __init__(self, pw: str): password = bytearray() for", "= encoded[index8]+(1<<(index%8)) return encoded def decode(self, encoded: bytearray, seed: int) -> List[int]: \"\"\"", "plain: bytearray, round: int, pSeed: int) -> bytearray: \"\"\" Encodes a block of", "encoded = self.buffer+encoded self.buffer = None while len(encoded) >= 256: ba = bytearray()", "numbers round: iteration of encode pSeed: seed for PBox Returns: block of encoded", "def decode(self, encoded: int) -> int: \"\"\" Decodes a single encoded number. 
Parameters:", "typing import Dict, Tuple, List class Encoder: def __init__(self, pw: str): password =", "| len(self.encodeMap) == 2048 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 2048 |", ">= 0 | round < 8 | pSeed >= 0 | pSeed <", "range(256): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: int) -> int: \"\"\" Encodes a", "(not self.seeded): ba = self.spBox.getSeed() returnvalue.extend(ba) self.seeded = True while len(plain) >= 256:", "True if len(encoded) > 0: self.buffer = encoded return returnvalue def close(self): return", "256 | seed[i] >= 1 | **Modifies:** | self.seed[i] \"\"\" for i in", "self.decodeMap: List[int] = [-1]*(256*8) index = 0 for i in range(256*8): emptyCounter =", "plain: plain number Returns: encoded number | **Pre:** | plain >= 0 |", "self.encodeMap[i] < 256 | len(self.decodeMap) == 256 | self.decodeMap[i] >= 0 | self.decodeMap[i]", "range(256): pSeed = (pSeed+self.seed[i])%256 decoded = self.decodeRound(encoded, 7, pSeed) for invertedI in range(7):", "pSeed) for invertedI in range(7): i = 6-invertedI decoded = self.decodeRound(decoded, i, pSeed)", "Parameters: pw: password seed: seed | **Pre:** | len(pw) == 4096 | len(seed)", "pw: password.append(ord(c)) index = 0 while len(password) < 4096: password.append(ord(pw[index%len(pw)])) index += 1", "range(256): ba.append(plain.pop(0)) encoded = self.spBox.encode(ba) returnvalue.extend(encoded) if len(plain) > 0: self.buffer = plain", "(seed[i] == seed2[i]): seedMatches += 1 for i in range(length): if (plain[i] ==", "256 | return[i] >= 0 | return[i] < 256 \"\"\" decoded = bytearray(256)", "for s in range(8): spw = bytearray(256) for i in range(256): spw[i] =", "self.pw = None self.spBox = None def test_simple(self): plain = bytearray() for i", "change to bytearray class SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in", "encoded = self.encodeRound(plain, 0, pSeed) for i in range(7): encoded = self.encodeRound(encoded, i+1,", "i in range(256): 
seedAtI = self.seed[i] encoded[i] = plain[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI", ">= 0 | return < 256 \"\"\" return self.decodeMap[encoded] class PBox: \"\"\" PBox", "for i in range(256): self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: int) -> int:", "self.encodeMap[plain] def decode(self, encoded: int) -> int: \"\"\" Decodes a single encoded number.", "= self.seed[i] encoded[i] = plain[i] ^ self.sBoxes[round].encodeMap[i] ^ seedAtI for j in range(8):", "= (pSeed+self.seed[i])%256 encoded = self.encodeRound(plain, 0, pSeed) for i in range(7): encoded =", "plain number. Parameters: plain: plain number Returns: encoded number | **Pre:** | plain", "block of decoded numbers | **Pre:** | len(encoded) == 256 | round >=", "= self.decodeRound(encoded, 7, pSeed) for invertedI in range(7): i = 6-invertedI decoded =", "decodedMatches += 1 self.assertTrue(decodedMatches == length) # TODO encodeMatches self.assertTrue(seedMatches < 256/10) #", "**Post:** | len(self.encodeMap) == 256 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 256", "range(length): if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(decodedMatches == length) # TODO", "of plain numbers. Parameters: plain: block of plain numbers round: iteration of encode", "ppw = bytearray(2048) for i in range(2048): ppw[i] = pw[8*256+i] self.pBox: PBox =", "| return[i] >= 0 | return[i] < 256 | **Modifies:** | self.seed[i] \"\"\"", "PBox = PBox(ppw) def encodeRound(self, plain: bytearray, round: int, pSeed: int) -> bytearray:", "**Pre:** | len(encoded) == 256 | round >= 0 | round < 8", "in range(256): self.seed[i] = seed[i] # TODO change general parameter policy: all parameters", "PBox: \"\"\" PBox is a transposition cipher. 
Attributes: encodeMap: lookuptable used to encode", "None: plain = self.buffer+plain self.buffer = None if (not self.seeded): ba = self.spBox.getSeed()", "i def encode(self, plain: int) -> int: \"\"\" Encodes a single plain number.", "256 | return[i] >= 0 | return[i] < 256 | **Modifies:** | self.seed[i]", "self.decodeMap[self.encodeMap[i]] = i def encode(self, plain: bytearray, seed: int) -> bytearray: \"\"\" Encodes", "Returns: encoded number | **Pre:** | plain >= 0 | plain < 256", "List[int] = [-1]*256 index = 0 for i in range(256): emptyCounter = 0", "range(256): self.assertTrue(self.spBox.seed[i] != 0) encoded = self.spBox.encode(plain) for i in range(256): self.assertTrue(self.spBox.seed[i] !=", "in range(256): encoded = self.pBox.encode(plain, seed) decoded = self.pBox.decode(encoded, seed) decodedMatches = 0", "== 256 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 256 | len(self.decodeMap) ==", "seed: seed pBox: PBox used for permutation Parameters: pw: password seed: seed |", "None): self.sBoxes: List[SBox] = [None]*8 if (seed is None): seed = bytearray(256) for", "< 2048 \"\"\" def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*(256*8) self.decodeMap: List[int]", "setUp(self): self.pw = bytearray() for i in range(4096): self.pw.append(randint(0, 255)) self.spBox = SPBox(self.pw)", "a single encoded number. Parameters: encoded: encoded number Returns: decoded number | **Pre:**", "decoded = bytearray(256) for i in range(256): indexVar = i*8 for b in", "Returns: block of seed numbers | **Post:** | len(return) == 256 | return[i]", "Returns: block of decoded numbers | **Pre:** | len(encoded) == 256 | encoded[i]", "getSeed(self) -> bytearray: \"\"\" Gets the seed. 
Returns: block of seed numbers |", "encoded numbers round: iteration of decode pSeed: seed for PBox Returns: block of", "range(256): self.seed[i] = decoded[i] ^ self.seed[i] if (self.seed[i] == 0): self.seed[i] = 1", "decodeRound(self, encoded: bytearray, round: int, pSeed: int) -> bytearray: \"\"\" Decodes a block", "of decode pSeed: seed for PBox Returns: block of decoded numbers | **Pre:**", "= SPBox(password) self.buffer = None self.seeded = False def encode(self, plain: bytearray): returnvalue", "== 256 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 256 \"\"\" def __init__(self,", "decode pSeed: seed for PBox Returns: block of decoded numbers | **Pre:** |", "def setSeed(self, seed: bytearray): \"\"\" Sets the seed. Parameters: seed: block of seed", "== 256 | round >= 0 | round < 8 | pSeed >=", "256 | **Post:** | len(return) == 256 | **Modifies:** | self.seed[i] \"\"\" pSeed", "index += 1 self.spBox = SPBox(password) self.buffer = None self.seeded = False def", "a block of encoded numbers. 
Parameters: encoded: block of encoded numbers Returns: block", "= 6-invertedI decoded = self.decodeRound(decoded, i, pSeed) for i in range(256): self.seed[i] =", "self.spBox.setSeed(seed) decoded = self.spBox.decode(encoded) decodedMatches = 0 seedMatches = 0 for i in", "index = 0 while len(password) < 4096: password.append(ord(pw[index%len(pw)])) index += 1 self.spBox =", "| **Pre:** | len(encoded) == 256 | encoded[i] >= 0 | encoded[i] <", "| len(plain) == 256 | round >= 0 | round < 8 |", "| encoded < 256 | **Post:** | return >= 0 | return <", "((encoded[i]) & (1<<b)): index = self.decodeMap[indexVar+b]-seed if (index < 0): index += 2048", "bytearray(256) for i in range(256): seed[i] = randint(1, 255) self.seed: bytearray = seed", "Parameters: encoded: block of encoded numbers Returns: block of decoded numbers | **Pre:**", "Attributes: encodeMap: lookuptable used to encode data decodeMap: lookuptable used to decode data", "round: int, pSeed: int) -> bytearray: \"\"\" Decodes a block of encoded numbers.", "| self.decodeMap[i] >= 0 | self.decodeMap[i] < 256 \"\"\" def __init__(self, pw: bytearray):", "== 256 | return[i] >= 1 \"\"\" seed = bytearray(256) for i in", "0): self.seed[i] = 1 return encoded def decode(self, encoded: bytearray) -> bytearray: \"\"\"", "+= 1 if (emptyCounter < targetEmpty): index = (index+1)%256 self.encodeMap[index] = i for", "seed numbers | **Pre:** | len(seed) == 256 | seed[i] >= 1 |", "= i def encode(self, plain: int) -> int: \"\"\" Encodes a single plain", "return[i] < 256 | **Modifies:** | self.seed[i] \"\"\" pSeed = 0 for i", "= plain return returnvalue def close(self): while len(self.buffer) < 256: self.buffer.append(randint(0, 255)) return", "= 0 for i in range(256): emptyCounter = 0 maxEmpty = 256-i targetEmpty", "bytearray): self.encodeMap: List[int] = [-1]*(256*8) self.decodeMap: List[int] = [-1]*(256*8) index = 0 for", "range(256): plain.append(randint(0, 255)) for seed in range(256): encoded = self.pBox.encode(plain, seed) 
decoded =", "self.decodeMap: List[int] = [-1]*256 index = 0 for i in range(256): emptyCounter =", "0 | return < 256 \"\"\" return self.decodeMap[encoded] class PBox: \"\"\" PBox is", "self.seeded = False def encode(self, plain: bytearray): returnvalue = bytearray() if self.buffer is", "s in range(8): spw = bytearray(256) for i in range(256): spw[i] = pw[s*256+i]", "return >= 0 | return < 256 \"\"\" return self.decodeMap[encoded] class PBox: \"\"\"", "PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(2048): self.pw.append(randint(0, 255)) self.pBox", "for invertedJ in range(8): j = 8-1-invertedJ if ((seedAtI & (1<<j)) != 0):", "i in range(256): spw[i] = pw[s*256+i] self.sBoxes[s] = SBox(spw) ppw = bytearray(2048) for", "of encoded numbers round: iteration of decode pSeed: seed for PBox Returns: block", "**Pre:** | len(encoded) == 256 | encoded[i] >= 0 | encoded[i] < 256", "= PBox(self.pw) def tearDown(self): self.pw = None self.pBox = None def test_simple(self): plain", "def tearDown(self): self.pw = None self.sBox = None def test_simple(self): decodedMatches = 0", "if ((seedAtI & (1<<j)) != 0): encoded[i] = self.sBoxes[j].encodeMap[ encoded[i]] # replacement for", "6-invertedI decoded = self.decodeRound(decoded, i, pSeed) for i in range(256): self.seed[i] = decoded[i]", "(pSeed+self.seed[i])%256 encoded = self.encodeRound(plain, 0, pSeed) for i in range(7): encoded = self.encodeRound(encoded,", "for b in range(8): if ((encoded[i]) & (1<<b)): index = self.decodeMap[indexVar+b]-seed if (index", "i in range(256): seedAtI = self.seed[i] for invertedJ in range(8): j = 8-1-invertedJ", "None self.seeded = False def encode(self, plain: bytearray): returnvalue = bytearray() if self.buffer", "def decode(self, encoded: bytearray) -> bytearray: \"\"\" Decodes a block of encoded numbers.", "| len(self.decodeMap) == 256 | self.decodeMap[i] >= 0 | self.decodeMap[i] < 256 \"\"\"", "= False def encode(self, plain: bytearray): returnvalue = 
bytearray() if self.buffer is not", "0 | return[i] < 256 \"\"\" encoded = bytearray(256) for i in range(256):", "bytearray) -> bytearray: \"\"\" Decodes a block of encoded numbers. Parameters: encoded: block", "i in range(256): pSeed = (pSeed+self.seed[i])%256 encoded = self.encodeRound(plain, 0, pSeed) for i", "i in range(256): plain = i encoded = self.sBox.encode(plain) decoded = self.sBox.decode(encoded) if", "encoded numbers. Parameters: encoded: block of encoded numbers round: iteration of decode pSeed:", "pSeed) for i in range(256): seedAtI = self.seed[i] for invertedJ in range(8): j", "| self.encodeMap[i] < 2048 | len(self.decodeMap) == 2048 | self.decodeMap[i] >= 0 |", "| encoded >= 0 | encoded < 256 | **Post:** | return >=", "def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*(256*8) self.decodeMap: List[int] = [-1]*(256*8) index", "block of decoded numbers | **Pre:** | len(encoded) == 256 | encoded[i] >=", "256) class PBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(2048): self.pw.append(randint(0,", "numbers Returns: block of decoded numbers | **Pre:** | len(encoded) == 256 |", "= SPBox(self.pw) def tearDown(self): self.pw = None self.spBox = None def test_simple(self): plain", "= PBox(ppw) def encodeRound(self, plain: bytearray, round: int, pSeed: int) -> bytearray: \"\"\"", "(seed is None): seed = bytearray(256) for i in range(256): seed[i] = randint(1,", "| len(return) == 256 | **Modifies:** | self.seed[i] \"\"\" pSeed = 0 for", "range(256): emptyCounter = 0 maxEmpty = 256-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter <", "seed: seed | **Pre:** | len(pw) == 4096 | len(seed) == 256 |", "encode data decodeMap: lookuptable used to decode data Parameters: pw: password | **Pre:**", "& (1<<b)): index = self.decodeMap[indexVar+b]-seed if (index < 0): index += 2048 index8", "for i in range(256): self.assertTrue(self.spBox.seed[i] != 0) seed2 = self.spBox.getSeed() self.spBox.setSeed(seed) decoded 
=", "2048 index8 = int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8)) return decoded class SPBox: \"\"\" SPBox", "len(return) == 256 | return[i] >= 0 | return[i] < 256 \"\"\" decoded", "indexVar = i*8+seed for b in range(8): if ((plain[i]) & (1<<b)): index =", "256 \"\"\" decoded = self.pBox.decode(encoded, pSeed) for i in range(256): seedAtI = self.seed[i]", "range(256): encoded = self.pBox.encode(plain, seed) decoded = self.pBox.decode(encoded, seed) decodedMatches = 0 encodedMatches", "= [-1]*(256*8) index = 0 for i in range(256*8): emptyCounter = 0 maxEmpty", "len(seed) == 256 | seed[i] >= 1 | **Post:** | len(self.sBoxes) == 8", "= decoded[index8]+(1<<(index%8)) return decoded class SPBox: \"\"\" SPBox is a substitution-permutation network. Attributes:", "pw: password | **Pre:** | len(pw) == 2048 | **Post:** | len(self.encodeMap) ==", "^ seedAtI for j in range(8): if ((seedAtI & (1<<j)) != 0): encoded[i]", "bytearray class SBoxUnitTest(unittest.TestCase): def setUp(self): self.pw = bytearray() for i in range(256): self.pw.append(randint(0,", "< targetEmpty): if (self.encodeMap[index] == -1): emptyCounter += 1 if (emptyCounter < targetEmpty):", "| plain < 256 | **Post:** | return >= 0 | return <", "class Encoder: def __init__(self, pw: str): password = bytearray() for c in pw:", "self.sBox = SBox(self.pw) def tearDown(self): self.pw = None self.sBox = None def test_simple(self):", "> 0: self.buffer = plain return returnvalue def close(self): while len(self.buffer) < 256:", "len(encoded) > 0: self.buffer = encoded return returnvalue def close(self): return bytearray() class", "List[int] = [-1]*256 self.decodeMap: List[int] = [-1]*256 index = 0 for i in", "= bytearray(256) for i in range(256): seedAtI = self.seed[i] encoded[i] = plain[i] ^", "| len(return) == 256 | return[i] >= 1 \"\"\" seed = bytearray(256) for", "def __init__(self, pw: bytearray): self.encodeMap: List[int] = [-1]*256 self.decodeMap: List[int] = [-1]*256 index", "= int(index/8) 
encoded[index8] = encoded[index8]+(1<<(index%8)) return encoded def decode(self, encoded: bytearray, seed: int)", "0): index += 2048 index8 = int(index/8) decoded[index8] = decoded[index8]+(1<<(index%8)) return decoded class", "self.spBox.decode(ba) returnvalue.extend(decoded) else: self.spBox.setSeed(ba) self.seeded = True if len(encoded) > 0: self.buffer =", "tearDown(self): self.pw = None self.sBox = None def test_simple(self): decodedMatches = 0 encodedMatches", "= self.pBox.decode(encoded, seed) decodedMatches = 0 encodedMatches = 0 for i in range(256):", "encodeRound(self, plain: bytearray, round: int, pSeed: int) -> bytearray: \"\"\" Encodes a block", "b in range(8): if ((encoded[i]) & (1<<b)): index = self.decodeMap[indexVar+b]-seed if (index <", "maxEmpty = 256-i targetEmpty = 1+(pw[i]%maxEmpty) while (emptyCounter < targetEmpty): if (self.encodeMap[index] ==", "in range(length): if (plain[i] == decoded[i]): decodedMatches += 1 self.assertTrue(decodedMatches == length) #", "return encoded def decode(self, encoded: bytearray, seed: int) -> List[int]: \"\"\" Decodes a", "== 256 | **Post:** | len(self.encodeMap) == 256 | self.encodeMap[i] >= 0 |", "len(pw) == 256 | **Post:** | len(self.encodeMap) == 256 | self.encodeMap[i] >= 0", "self.seed[i] return seed def setSeed(self, seed: bytearray): \"\"\" Sets the seed. Parameters: seed:", "bytearray): returnvalue = bytearray() if self.buffer is not None: plain = self.buffer+plain self.buffer", "len(self.buffer) < 256: self.buffer.append(randint(0, 255)) return self.encode(bytearray()) class Decoder: def __init__(self, pw: str):", "substitution-permutation network. 
Attributes: sBoxes: list of SBoxes used for substitution seed: seed pBox:", "in range(7): i = 6-invertedI decoded = self.decodeRound(decoded, i, pSeed) for i in", "\"\"\" pSeed = 0 for i in range(256): pSeed = (pSeed+self.seed[i])%256 decoded =", "== 2048 | self.encodeMap[i] >= 0 | self.encodeMap[i] < 2048 | len(self.decodeMap) ==", "encoded numbers Returns: block of decoded numbers | **Pre:** | len(encoded) == 256", "decoded[index8] = decoded[index8]+(1<<(index%8)) return decoded class SPBox: \"\"\" SPBox is a substitution-permutation network." ]
[ "import apis into api package from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi from admin_api.api.card_api import CardApi", "into api package from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi from admin_api.api.card_api import CardApi from admin_api.api.client_api", "package from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi from admin_api.api.card_api import CardApi from admin_api.api.client_api import ClientApi", "flake8: noqa # import apis into api package from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi from", "# import apis into api package from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi from admin_api.api.card_api import", "api package from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi from admin_api.api.card_api import CardApi from admin_api.api.client_api import", "import absolute_import # flake8: noqa # import apis into api package from admin_api.api.auto_generate_app_token_api", "apis into api package from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi from admin_api.api.card_api import CardApi from", "<filename>web-component/python/admin_api/api/__init__.py<gh_stars>0 from __future__ import absolute_import # flake8: noqa # import apis into api", "from __future__ import absolute_import # flake8: noqa # import apis into api package", "absolute_import # flake8: noqa # import apis into api package from admin_api.api.auto_generate_app_token_api import", "__future__ import absolute_import # flake8: noqa # import apis into api package from", "# flake8: noqa # import apis into api package from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi", "noqa # import apis into api package from admin_api.api.auto_generate_app_token_api import AutoGenerateAppTokenApi from admin_api.api.card_api" ]
[ "problems.vrp.problem_vrp import CVRP, SDVRP from problems.op.problem_op import OP from problems.pctsp.problem_pctsp import PCTSPDet, PCTSPStoch", "from problems.tsp.problem_tsp import TSP, TSPEdge from problems.vrp.problem_vrp import CVRP, SDVRP from problems.op.problem_op import", "TSP, TSPEdge from problems.vrp.problem_vrp import CVRP, SDVRP from problems.op.problem_op import OP from problems.pctsp.problem_pctsp", "problems.tsp.problem_tsp import TSP, TSPEdge from problems.vrp.problem_vrp import CVRP, SDVRP from problems.op.problem_op import OP", "from problems.vrp.problem_vrp import CVRP, SDVRP from problems.op.problem_op import OP from problems.pctsp.problem_pctsp import PCTSPDet,", "TSPEdge from problems.vrp.problem_vrp import CVRP, SDVRP from problems.op.problem_op import OP from problems.pctsp.problem_pctsp import", "<reponame>rampasek/attention-learn-to-route from problems.tsp.problem_tsp import TSP, TSPEdge from problems.vrp.problem_vrp import CVRP, SDVRP from problems.op.problem_op", "import TSP, TSPEdge from problems.vrp.problem_vrp import CVRP, SDVRP from problems.op.problem_op import OP from" ]
[ "noqa from .cache import Cache # noqa from .benchmark import Benchmark # noqa", "Cache # noqa from .benchmark import Benchmark # noqa # from .experiments import", "import * # noqa from .cache import Cache # noqa from .benchmark import", "from .cache import Cache # noqa from .benchmark import Benchmark # noqa #", "import * # noqa from .azure import * # noqa from .cache import", "noqa from .benchmark import Benchmark # noqa # from .experiments import * #", "# noqa from .azure import * # noqa from .cache import Cache #", "SeBS # noqa from .aws import * # noqa from .azure import *", "noqa from .aws import * # noqa from .azure import * # noqa", "from .aws import * # noqa from .azure import * # noqa from", "# noqa from .aws import * # noqa from .azure import * #", "* # noqa from .cache import Cache # noqa from .benchmark import Benchmark", ".cache import Cache # noqa from .benchmark import Benchmark # noqa # from", "from .azure import * # noqa from .cache import Cache # noqa from", "# noqa from .cache import Cache # noqa from .benchmark import Benchmark #", "import SeBS # noqa from .aws import * # noqa from .azure import", "from .benchmark import Benchmark # noqa # from .experiments import * # noqa", "* # noqa from .azure import * # noqa from .cache import Cache", "# noqa from .benchmark import Benchmark # noqa # from .experiments import *", "from .sebs import SeBS # noqa from .aws import * # noqa from", ".azure import * # noqa from .cache import Cache # noqa from .benchmark", "import Cache # noqa from .benchmark import Benchmark # noqa # from .experiments", ".aws import * # noqa from .azure import * # noqa from .cache", "noqa from .azure import * # noqa from .cache import Cache # noqa", ".sebs import SeBS # noqa from .aws import * # noqa from .azure" ]
[ "+= 1 print(arr) if __name__ == \"__main__\": arr = [-1, 2, -3, 4,", "n): j = 0 for i in range(0, n): if (arr[i] < 0):", "arr[i], arr[j] = arr[j], arr[i] j += 1 print(arr) if __name__ == \"__main__\":", "j = 0 for i in range(0, n): if (arr[i] < 0): arr[i],", "2, -3, 4, 5, 6, -7, 8, 9] n = len(arr) rearrange(arr, n)", "1 print(arr) if __name__ == \"__main__\": arr = [-1, 2, -3, 4, 5,", "[-1, 2, -3, 4, 5, 6, -7, 8, 9] n = len(arr) rearrange(arr,", "if __name__ == \"__main__\": arr = [-1, 2, -3, 4, 5, 6, -7,", "i in range(0, n): if (arr[i] < 0): arr[i], arr[j] = arr[j], arr[i]", "< 0): arr[i], arr[j] = arr[j], arr[i] j += 1 print(arr) if __name__", "= 0 for i in range(0, n): if (arr[i] < 0): arr[i], arr[j]", "arr[j] = arr[j], arr[i] j += 1 print(arr) if __name__ == \"__main__\": arr", "arr[i] j += 1 print(arr) if __name__ == \"__main__\": arr = [-1, 2,", "j += 1 print(arr) if __name__ == \"__main__\": arr = [-1, 2, -3,", "if (arr[i] < 0): arr[i], arr[j] = arr[j], arr[i] j += 1 print(arr)", "0 for i in range(0, n): if (arr[i] < 0): arr[i], arr[j] =", "= [-1, 2, -3, 4, 5, 6, -7, 8, 9] n = len(arr)", "range(0, n): if (arr[i] < 0): arr[i], arr[j] = arr[j], arr[i] j +=", "print(arr) if __name__ == \"__main__\": arr = [-1, 2, -3, 4, 5, 6,", "0): arr[i], arr[j] = arr[j], arr[i] j += 1 print(arr) if __name__ ==", "__name__ == \"__main__\": arr = [-1, 2, -3, 4, 5, 6, -7, 8,", "== \"__main__\": arr = [-1, 2, -3, 4, 5, 6, -7, 8, 9]", "arr[j], arr[i] j += 1 print(arr) if __name__ == \"__main__\": arr = [-1,", "\"__main__\": arr = [-1, 2, -3, 4, 5, 6, -7, 8, 9] n", "rearrange(arr, n): j = 0 for i in range(0, n): if (arr[i] <", "= arr[j], arr[i] j += 1 print(arr) if __name__ == \"__main__\": arr =", "n): if (arr[i] < 0): arr[i], arr[j] = arr[j], arr[i] j += 1", "in range(0, n): if (arr[i] < 0): arr[i], arr[j] = arr[j], arr[i] j", "(arr[i] < 0): arr[i], arr[j] = arr[j], arr[i] j += 1 print(arr) if", "def rearrange(arr, n): j = 0 for i in range(0, n): if (arr[i]", "arr = [-1, 2, 
-3, 4, 5, 6, -7, 8, 9] n =", "for i in range(0, n): if (arr[i] < 0): arr[i], arr[j] = arr[j]," ]
[]
[ "elif token in OPERATORS: node[\"val\"] = token node[\"right\"] = {} stack.append(node) node =", "in OPERATORS: node[\"val\"] = token node[\"right\"] = {} stack.append(node) node = node[\"right\"] else:", "node[\"val\"] = int(token) parent = stack.pop() node = parent return tree def evaluate(tree):", "operator.add, \"-\": operator.sub, \"*\": operator.mul, \"/\": operator.truediv} LEFT_PAREN = \"(\" RIGHT_PAREN = \")\"", "operate = OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError: # no left or no", "= tree for token in expression: if token == LEFT_PAREN: node[\"left\"] = {}", "node = node[\"left\"] elif token == RIGHT_PAREN: node = stack.pop() elif token in", "if parse_tree is None: return \"\" left = construct_expression(parse_tree.get(\"left\")) right = construct_expression(parse_tree.get(\"right\")) val", "int(token) parent = stack.pop() node = parent return tree def evaluate(tree): try: operate", "token == RIGHT_PAREN: node = stack.pop() elif token in OPERATORS: node[\"val\"] = token", "= {} stack.append(node) node = node[\"right\"] else: node[\"val\"] = int(token) parent = stack.pop()", "evaluate(tree[\"right\"])) except KeyError: # no left or no right, so is a leaf", "right, so is a leaf - our base case return tree[\"val\"] def construct_expression(parse_tree):", "{} stack = [tree] node = tree for token in expression: if token", "node = node[\"right\"] else: node[\"val\"] = int(token) parent = stack.pop() node = parent", "stack.pop() node = parent return tree def evaluate(tree): try: operate = OPERATORS[tree[\"val\"]] return", "= stack.pop() elif token in OPERATORS: node[\"val\"] = token node[\"right\"] = {} stack.append(node)", "except KeyError: # no left or no right, so is a leaf -", "so is a leaf - our base case return tree[\"val\"] def construct_expression(parse_tree): if", "parse_tree is None: return \"\" left = construct_expression(parse_tree.get(\"left\")) right = 
construct_expression(parse_tree.get(\"right\")) val =", "stack.append(node) node = node[\"left\"] elif token == RIGHT_PAREN: node = stack.pop() elif token", "{\"+\": operator.add, \"-\": operator.sub, \"*\": operator.mul, \"/\": operator.truediv} LEFT_PAREN = \"(\" RIGHT_PAREN =", "{} stack.append(node) node = node[\"right\"] else: node[\"val\"] = int(token) parent = stack.pop() node", "RIGHT_PAREN: node = stack.pop() elif token in OPERATORS: node[\"val\"] = token node[\"right\"] =", "\"(\" RIGHT_PAREN = \")\" def build_parse_tree(expression): tree = {} stack = [tree] node", "tree = {} stack = [tree] node = tree for token in expression:", "return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError: # no left or no right, so is", "return \"\" left = construct_expression(parse_tree.get(\"left\")) right = construct_expression(parse_tree.get(\"right\")) val = parse_tree[\"val\"] if left", "node[\"val\"] = token node[\"right\"] = {} stack.append(node) node = node[\"right\"] else: node[\"val\"] =", "[tree] node = tree for token in expression: if token == LEFT_PAREN: node[\"left\"]", "expression: if token == LEFT_PAREN: node[\"left\"] = {} stack.append(node) node = node[\"left\"] elif", "node[\"left\"] elif token == RIGHT_PAREN: node = stack.pop() elif token in OPERATORS: node[\"val\"]", "OPERATORS = {\"+\": operator.add, \"-\": operator.sub, \"*\": operator.mul, \"/\": operator.truediv} LEFT_PAREN = \"(\"", "node = tree for token in expression: if token == LEFT_PAREN: node[\"left\"] =", "for token in expression: if token == LEFT_PAREN: node[\"left\"] = {} stack.append(node) node", "\"\" left = construct_expression(parse_tree.get(\"left\")) right = construct_expression(parse_tree.get(\"right\")) val = parse_tree[\"val\"] if left and", "val = parse_tree[\"val\"] if left and right: return \"({}{}{})\".format(left, val, right) return val", "= stack.pop() node = parent return tree def evaluate(tree): try: operate = OPERATORS[tree[\"val\"]]", 
"stack.append(node) node = node[\"right\"] else: node[\"val\"] = int(token) parent = stack.pop() node =", "if token == LEFT_PAREN: node[\"left\"] = {} stack.append(node) node = node[\"left\"] elif token", "our base case return tree[\"val\"] def construct_expression(parse_tree): if parse_tree is None: return \"\"", "= construct_expression(parse_tree.get(\"right\")) val = parse_tree[\"val\"] if left and right: return \"({}{}{})\".format(left, val, right)", "OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError: # no left or no right, so", "construct_expression(parse_tree.get(\"left\")) right = construct_expression(parse_tree.get(\"right\")) val = parse_tree[\"val\"] if left and right: return \"({}{}{})\".format(left,", "= {} stack = [tree] node = tree for token in expression: if", "= node[\"left\"] elif token == RIGHT_PAREN: node = stack.pop() elif token in OPERATORS:", "no right, so is a leaf - our base case return tree[\"val\"] def", "operator OPERATORS = {\"+\": operator.add, \"-\": operator.sub, \"*\": operator.mul, \"/\": operator.truediv} LEFT_PAREN =", "RIGHT_PAREN = \")\" def build_parse_tree(expression): tree = {} stack = [tree] node =", "right = construct_expression(parse_tree.get(\"right\")) val = parse_tree[\"val\"] if left and right: return \"({}{}{})\".format(left, val,", "parent = stack.pop() node = parent return tree def evaluate(tree): try: operate =", "= node[\"right\"] else: node[\"val\"] = int(token) parent = stack.pop() node = parent return", "def evaluate(tree): try: operate = OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError: # no", "None: return \"\" left = construct_expression(parse_tree.get(\"left\")) right = construct_expression(parse_tree.get(\"right\")) val = parse_tree[\"val\"] if", "= parent return tree def evaluate(tree): try: operate = OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), 
evaluate(tree[\"right\"]))", "== LEFT_PAREN: node[\"left\"] = {} stack.append(node) node = node[\"left\"] elif token == RIGHT_PAREN:", "= {\"+\": operator.add, \"-\": operator.sub, \"*\": operator.mul, \"/\": operator.truediv} LEFT_PAREN = \"(\" RIGHT_PAREN", "\"-\": operator.sub, \"*\": operator.mul, \"/\": operator.truediv} LEFT_PAREN = \"(\" RIGHT_PAREN = \")\" def", "= \")\" def build_parse_tree(expression): tree = {} stack = [tree] node = tree", "return tree[\"val\"] def construct_expression(parse_tree): if parse_tree is None: return \"\" left = construct_expression(parse_tree.get(\"left\"))", "stack.pop() elif token in OPERATORS: node[\"val\"] = token node[\"right\"] = {} stack.append(node) node", "return tree def evaluate(tree): try: operate = OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError:", "is None: return \"\" left = construct_expression(parse_tree.get(\"left\")) right = construct_expression(parse_tree.get(\"right\")) val = parse_tree[\"val\"]", "build_parse_tree(expression): tree = {} stack = [tree] node = tree for token in", "parent return tree def evaluate(tree): try: operate = OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except", "a leaf - our base case return tree[\"val\"] def construct_expression(parse_tree): if parse_tree is", "base case return tree[\"val\"] def construct_expression(parse_tree): if parse_tree is None: return \"\" left", "operator.mul, \"/\": operator.truediv} LEFT_PAREN = \"(\" RIGHT_PAREN = \")\" def build_parse_tree(expression): tree =", "token in OPERATORS: node[\"val\"] = token node[\"right\"] = {} stack.append(node) node = node[\"right\"]", "construct_expression(parse_tree): if parse_tree is None: return \"\" left = construct_expression(parse_tree.get(\"left\")) right = construct_expression(parse_tree.get(\"right\"))", "node = stack.pop() elif token in OPERATORS: node[\"val\"] = token node[\"right\"] = {}", "tree def 
evaluate(tree): try: operate = OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError: #", "leaf - our base case return tree[\"val\"] def construct_expression(parse_tree): if parse_tree is None:", "token node[\"right\"] = {} stack.append(node) node = node[\"right\"] else: node[\"val\"] = int(token) parent", "tree[\"val\"] def construct_expression(parse_tree): if parse_tree is None: return \"\" left = construct_expression(parse_tree.get(\"left\")) right", "operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError: # no left or no right, so is a", "= token node[\"right\"] = {} stack.append(node) node = node[\"right\"] else: node[\"val\"] = int(token)", "import operator OPERATORS = {\"+\": operator.add, \"-\": operator.sub, \"*\": operator.mul, \"/\": operator.truediv} LEFT_PAREN", "= OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError: # no left or no right,", "case return tree[\"val\"] def construct_expression(parse_tree): if parse_tree is None: return \"\" left =", "left or no right, so is a leaf - our base case return", "node = parent return tree def evaluate(tree): try: operate = OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]),", "= \"(\" RIGHT_PAREN = \")\" def build_parse_tree(expression): tree = {} stack = [tree]", "node[\"right\"] else: node[\"val\"] = int(token) parent = stack.pop() node = parent return tree", "= int(token) parent = stack.pop() node = parent return tree def evaluate(tree): try:", "token in expression: if token == LEFT_PAREN: node[\"left\"] = {} stack.append(node) node =", "- our base case return tree[\"val\"] def construct_expression(parse_tree): if parse_tree is None: return", "or no right, so is a leaf - our base case return tree[\"val\"]", "LEFT_PAREN: node[\"left\"] = {} stack.append(node) node = node[\"left\"] elif token == RIGHT_PAREN: node", "LEFT_PAREN = \"(\" RIGHT_PAREN = \")\" def 
build_parse_tree(expression): tree = {} stack =", "node[\"right\"] = {} stack.append(node) node = node[\"right\"] else: node[\"val\"] = int(token) parent =", "evaluate(tree): try: operate = OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError: # no left", "token == LEFT_PAREN: node[\"left\"] = {} stack.append(node) node = node[\"left\"] elif token ==", "elif token == RIGHT_PAREN: node = stack.pop() elif token in OPERATORS: node[\"val\"] =", "in expression: if token == LEFT_PAREN: node[\"left\"] = {} stack.append(node) node = node[\"left\"]", "def construct_expression(parse_tree): if parse_tree is None: return \"\" left = construct_expression(parse_tree.get(\"left\")) right =", "# no left or no right, so is a leaf - our base", "\"/\": operator.truediv} LEFT_PAREN = \"(\" RIGHT_PAREN = \")\" def build_parse_tree(expression): tree = {}", "stack = [tree] node = tree for token in expression: if token ==", "= [tree] node = tree for token in expression: if token == LEFT_PAREN:", "else: node[\"val\"] = int(token) parent = stack.pop() node = parent return tree def", "construct_expression(parse_tree.get(\"right\")) val = parse_tree[\"val\"] if left and right: return \"({}{}{})\".format(left, val, right) return", "\")\" def build_parse_tree(expression): tree = {} stack = [tree] node = tree for", "is a leaf - our base case return tree[\"val\"] def construct_expression(parse_tree): if parse_tree", "node[\"left\"] = {} stack.append(node) node = node[\"left\"] elif token == RIGHT_PAREN: node =", "OPERATORS: node[\"val\"] = token node[\"right\"] = {} stack.append(node) node = node[\"right\"] else: node[\"val\"]", "KeyError: # no left or no right, so is a leaf - our", "no left or no right, so is a leaf - our base case", "= {} stack.append(node) node = node[\"left\"] elif token == RIGHT_PAREN: node = stack.pop()", "left = construct_expression(parse_tree.get(\"left\")) right = construct_expression(parse_tree.get(\"right\")) val 
= parse_tree[\"val\"] if left and right:", "try: operate = OPERATORS[tree[\"val\"]] return operate(evaluate(tree[\"left\"]), evaluate(tree[\"right\"])) except KeyError: # no left or", "def build_parse_tree(expression): tree = {} stack = [tree] node = tree for token", "\"*\": operator.mul, \"/\": operator.truediv} LEFT_PAREN = \"(\" RIGHT_PAREN = \")\" def build_parse_tree(expression): tree", "operator.sub, \"*\": operator.mul, \"/\": operator.truediv} LEFT_PAREN = \"(\" RIGHT_PAREN = \")\" def build_parse_tree(expression):", "operator.truediv} LEFT_PAREN = \"(\" RIGHT_PAREN = \")\" def build_parse_tree(expression): tree = {} stack", "= construct_expression(parse_tree.get(\"left\")) right = construct_expression(parse_tree.get(\"right\")) val = parse_tree[\"val\"] if left and right: return", "{} stack.append(node) node = node[\"left\"] elif token == RIGHT_PAREN: node = stack.pop() elif", "== RIGHT_PAREN: node = stack.pop() elif token in OPERATORS: node[\"val\"] = token node[\"right\"]", "tree for token in expression: if token == LEFT_PAREN: node[\"left\"] = {} stack.append(node)" ]
[ "E , F, G , N 6 cols with float or int 9", "train_data.E.value_counts() #print train_data.corr() le=LabelEncoder() train_data.E = le.fit_transform(train_data.E) print train_data.F.value_counts() print test_data.F.value_counts() # print", "pd.read_csv(\"../data/test.csv\") #print train_data.info() ''' 15 cols : A to O for training Many", "A B C D E F G H I J K L M", "f 0 f g 120 375 1 1 2 a 21.75 11.750 u", "0 1 2 3 b 30.17 1.085 y p c v 0.040 f", "binary are there in them) ''' #print train_data.describe() #print train_data.head(5) ''' id A", "sklearn.preprocessing import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_ #print train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder() train_data.D", "like binary are there in them) ''' #print train_data.describe() #print train_data.head(5) ''' id", "= pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print train_data.info() ''' 15 cols", ", K , L , M , O , P with null :", "E F G H I J K L M N O P 0", "179 1 3 4 b 22.67 2.540 y p c h 2.585 t", "K L M N O P 0 1 b 18.42 10.415 y p", "''' print train_data[(train_data.A!='a') & (train_data.A!='b')] from sklearn.preprocessing import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_", "pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print train_data.info() ''' 15 cols :", "u g c v 0.250 f f 0 t g 180 0 1", "import matplotlib.pylab as plt train_data = pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data) ## 
pd.read_csv(\"../data/test.csv\")", "Non null : C , H , I , J , K ,", "vars are not linearly correlated ''' print train_data[(train_data.A!='a') & (train_data.A!='b')] from sklearn.preprocessing import", "u g c v 2.000 t t 11 f g 0 456 0", "np import pandas as pd import matplotlib.pylab as plt train_data = pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data)", "C , H , I , J , K , L , M", "J , K , L , M , O , P with null", "N 6 cols with float or int 9 are with string ( seems", "15 cols : A to O for training Many vars have null value", "cols : A to O for training Many vars have null value Non", "train_data.D = le.fit_transform(train_data.D) print le.classes_ #print train_data.corr() #print train_data.E.value_counts() #print train_data.corr() le=LabelEncoder() train_data.E", "null : A , B , D , E , F, G ,", ": A , B , D , E , F, G , N", "0.040 f f 0 f g 170 179 1 3 4 b 22.67", "float or int 9 are with string ( seems like binary are there", "#print train_data.D.value_counts() le=LabelEncoder() train_data.D = le.fit_transform(train_data.D) print le.classes_ #print train_data.corr() #print train_data.E.value_counts() #print", "train_data.corr() #print train_data.E.value_counts() #print train_data.corr() le=LabelEncoder() train_data.E = le.fit_transform(train_data.E) print train_data.F.value_counts() print test_data.F.value_counts()", "#print train_data.E.value_counts() #print train_data.corr() le=LabelEncoder() train_data.E = le.fit_transform(train_data.E) print train_data.F.value_counts() print test_data.F.value_counts() #", "v 2.000 t t 11 f g 0 456 0 ''' #print train_data.corr()", "corr seen states the vars are not linearly correlated ''' print train_data[(train_data.A!='a') &", "have null value Non null : C , H , I , J", "f f 0 t g 180 0 1 2 3 b 30.17 1.085", "test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print train_data.info() ''' 15 cols : A to", "f 
0 f g 0 0 0 4 5 a 36.00 1.000 u", "Many vars have null value Non null : C , H , I", "f 0 f g 170 179 1 3 4 b 22.67 2.540 y", "train_data.corr() ''' the kind of corr seen states the vars are not linearly", "le.fit_transform(train_data.E) print train_data.F.value_counts() print test_data.F.value_counts() # print train_data.G.value_counts() # print train_data.I.value_counts() # print", "train_data.G.value_counts() # print train_data.I.value_counts() # print train_data.J.value_counts() # print train_data.L.value_counts() # print train_data.M.value_counts()", "180 0 1 2 3 b 30.17 1.085 y p c v 0.040", "I , J , K , L , M , O , P", "f f 0 f g 170 179 1 3 4 b 22.67 2.540", "1.085 y p c v 0.040 f f 0 f g 170 179", "2.540 y p c h 2.585 t f 0 f g 0 0", "30.17 1.085 y p c v 0.040 f f 0 f g 170", "import pandas as pd import matplotlib.pylab as plt train_data = pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\")", "aa v 0.125 t f 0 f g 120 375 1 1 2", "f g 120 375 1 1 2 a 21.75 11.750 u g c", "le.fit_transform(train_data.D) print le.classes_ #print train_data.corr() #print train_data.E.value_counts() #print train_data.corr() le=LabelEncoder() train_data.E = le.fit_transform(train_data.E)", "states the vars are not linearly correlated ''' print train_data[(train_data.A!='a') & (train_data.A!='b')] from", ", E , F, G , N 6 cols with float or int", "N O P 0 1 b 18.42 10.415 y p aa v 0.125", "G H I J K L M N O P 0 1 b", "0.250 f f 0 t g 180 0 1 2 3 b 30.17", "le=LabelEncoder() train_data.E = le.fit_transform(train_data.E) print train_data.F.value_counts() print test_data.F.value_counts() # print train_data.G.value_counts() # print", "there in them) ''' #print train_data.describe() #print train_data.head(5) ''' id A B C", "matplotlib.pylab as plt train_data = pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) 
data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print", "c v 0.250 f f 0 t g 180 0 1 2 3", "c h 2.585 t f 0 f g 0 0 0 4 5", "data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print train_data.info() ''' 15 cols : A to O for", "are there in them) ''' #print train_data.describe() #print train_data.head(5) ''' id A B", "int 9 are with string ( seems like binary are there in them)", "cols with float or int 9 are with string ( seems like binary", "v 0.125 t f 0 f g 120 375 1 1 2 a", "4 5 a 36.00 1.000 u g c v 2.000 t t 11", "g 0 456 0 ''' #print train_data.corr() ''' the kind of corr seen", "print train_data[(train_data.A!='a') & (train_data.A!='b')] from sklearn.preprocessing import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_ #print", "''' #print train_data.describe() #print train_data.head(5) ''' id A B C D E F", "#print train_data.corr() ''' the kind of corr seen states the vars are not", "p aa v 0.125 t f 0 f g 120 375 1 1", "t g 180 0 1 2 3 b 30.17 1.085 y p c", "with float or int 9 are with string ( seems like binary are", "train_data.describe() #print train_data.head(5) ''' id A B C D E F G H", ", M , O , P with null : A , B ,", ", O , P with null : A , B , D ,", ", D , E , F, G , N 6 cols with float", "G , N 6 cols with float or int 9 are with string", "M N O P 0 1 b 18.42 10.415 y p aa v", "## pd.read_csv(\"../data/test.csv\") #print train_data.info() ''' 15 cols : A to O for training", "F, G , N 6 cols with float or int 9 are with", "2.585 t f 0 f g 0 0 0 4 5 a 36.00", "a 36.00 1.000 u g c v 2.000 t t 11 f g", "pandas as pd import matplotlib.pylab as plt train_data = pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data)", "& (train_data.A!='b')] from sklearn.preprocessing import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_ #print 
train_data.D.describe() #print", "linearly correlated ''' print train_data[(train_data.A!='a') & (train_data.A!='b')] from sklearn.preprocessing import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A)", "M , O , P with null : A , B , D", "pd import matplotlib.pylab as plt train_data = pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data) ##", "vars have null value Non null : C , H , I ,", ", H , I , J , K , L , M ,", ": C , H , I , J , K , L ,", "with null : A , B , D , E , F, G", "id A B C D E F G H I J K L", "F G H I J K L M N O P 0 1", "#print train_data.head(5) ''' id A B C D E F G H I", "O P 0 1 b 18.42 10.415 y p aa v 0.125 t", "0 f g 120 375 1 1 2 a 21.75 11.750 u g", "P with null : A , B , D , E , F,", "1 b 18.42 10.415 y p aa v 0.125 t f 0 f", "A , B , D , E , F, G , N 6", "I J K L M N O P 0 1 b 18.42 10.415", "g 170 179 1 3 4 b 22.67 2.540 y p c h", "h 2.585 t f 0 f g 0 0 0 4 5 a", "11.750 u g c v 0.250 f f 0 t g 180 0", "456 0 ''' #print train_data.corr() ''' the kind of corr seen states the", "B C D E F G H I J K L M N", "t 11 f g 0 456 0 ''' #print train_data.corr() ''' the kind", "le.classes_ #print train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder() train_data.D = le.fit_transform(train_data.D) print le.classes_ #print train_data.corr()", "#print train_data.corr() #print train_data.E.value_counts() #print train_data.corr() le=LabelEncoder() train_data.E = le.fit_transform(train_data.E) print train_data.F.value_counts() print", ", I , J , K , L , M , O ,", "g 120 375 1 1 2 a 21.75 11.750 u g c v", "g 180 0 1 2 3 b 30.17 1.085 y p c v", "0 0 0 4 5 a 36.00 1.000 u g c v 2.000", "value Non null : C , H , I , J , K", "c v 2.000 t t 11 f g 0 456 0 ''' #print", "for training Many vars have null value Non null : C , H", "correlated ''' print train_data[(train_data.A!='a') & 
(train_data.A!='b')] from sklearn.preprocessing import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print", "0.125 t f 0 f g 120 375 1 1 2 a 21.75", "= le.fit_transform(train_data.D) print le.classes_ #print train_data.corr() #print train_data.E.value_counts() #print train_data.corr() le=LabelEncoder() train_data.E =", "print le.classes_ #print train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder() train_data.D = le.fit_transform(train_data.D) print le.classes_ #print", "170 179 1 3 4 b 22.67 2.540 y p c h 2.585", "<reponame>divayjindal95/DataScience import numpy as np import pandas as pd import matplotlib.pylab as plt", "are not linearly correlated ''' print train_data[(train_data.A!='a') & (train_data.A!='b')] from sklearn.preprocessing import LabelEncoder", "0 ''' #print train_data.corr() ''' the kind of corr seen states the vars", "import numpy as np import pandas as pd import matplotlib.pylab as plt train_data", "A to O for training Many vars have null value Non null :", "y p aa v 0.125 t f 0 f g 120 375 1", "120 375 1 1 2 a 21.75 11.750 u g c v 0.250", "O for training Many vars have null value Non null : C ,", "0 1 b 18.42 10.415 y p aa v 0.125 t f 0", "375 1 1 2 a 21.75 11.750 u g c v 0.250 f", "train_data[(train_data.A!='a') & (train_data.A!='b')] from sklearn.preprocessing import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_ #print train_data.D.describe()", "a 21.75 11.750 u g c v 0.250 f f 0 t g", "f g 170 179 1 3 4 b 22.67 2.540 y p c", "18.42 10.415 y p aa v 0.125 t f 0 f g 120", "(train_data.A!='b')] from sklearn.preprocessing import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_ #print train_data.D.describe() #print train_data.D.value_counts()", "string ( seems like binary are there in them) ''' #print train_data.describe() #print", "O , P with null : A , B , D , E", "3 4 b 22.67 2.540 y p c h 2.585 t f 0", 
"2.000 t t 11 f g 0 456 0 ''' #print train_data.corr() '''", "f g 0 456 0 ''' #print train_data.corr() ''' the kind of corr", "D , E , F, G , N 6 cols with float or", "( seems like binary are there in them) ''' #print train_data.describe() #print train_data.head(5)", "0 4 5 a 36.00 1.000 u g c v 2.000 t t", "5 a 36.00 1.000 u g c v 2.000 t t 11 f", "train_data.D.value_counts() le=LabelEncoder() train_data.D = le.fit_transform(train_data.D) print le.classes_ #print train_data.corr() #print train_data.E.value_counts() #print train_data.corr()", "train_data = pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print train_data.info() ''' 15", "L , M , O , P with null : A , B", "in them) ''' #print train_data.describe() #print train_data.head(5) ''' id A B C D", "6 cols with float or int 9 are with string ( seems like", "t t 11 f g 0 456 0 ''' #print train_data.corr() ''' the", "D E F G H I J K L M N O P", "p c h 2.585 t f 0 f g 0 0 0 4", "g 0 0 0 4 5 a 36.00 1.000 u g c v", "0 t g 180 0 1 2 3 b 30.17 1.085 y p", "train_data.A=le.fit_transform(train_data.A) print le.classes_ #print train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder() train_data.D = le.fit_transform(train_data.D) print le.classes_", "v 0.250 f f 0 t g 180 0 1 2 3 b", "t f 0 f g 0 0 0 4 5 a 36.00 1.000", "numpy as np import pandas as pd import matplotlib.pylab as plt train_data =", "b 30.17 1.085 y p c v 0.040 f f 0 f g", "1 1 2 a 21.75 11.750 u g c v 0.250 f f", "36.00 1.000 u g c v 2.000 t t 11 f g 0", "2 a 21.75 11.750 u g c v 0.250 f f 0 t", "11 f g 0 456 0 ''' #print train_data.corr() ''' the kind of", "B , D , E , F, G , N 6 cols with", "train_data.E = le.fit_transform(train_data.E) print train_data.F.value_counts() print test_data.F.value_counts() # print train_data.G.value_counts() # print 
train_data.I.value_counts()", "#print train_data.describe() #print train_data.head(5) ''' id A B C D E F G", "= le.fit_transform(train_data.E) print train_data.F.value_counts() print test_data.F.value_counts() # print train_data.G.value_counts() # print train_data.I.value_counts() #", "10.415 y p aa v 0.125 t f 0 f g 120 375", "''' id A B C D E F G H I J K", ", F, G , N 6 cols with float or int 9 are", "the vars are not linearly correlated ''' print train_data[(train_data.A!='a') & (train_data.A!='b')] from sklearn.preprocessing", "are with string ( seems like binary are there in them) ''' #print", "test_data_len=len(test_data) data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print train_data.info() ''' 15 cols : A to O", "import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_ #print train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder() train_data.D =", "train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder() train_data.D = le.fit_transform(train_data.D) print le.classes_ #print train_data.corr() #print train_data.E.value_counts()", ", P with null : A , B , D , E ,", "y p c h 2.585 t f 0 f g 0 0 0", "0 f g 0 0 0 4 5 a 36.00 1.000 u g", "''' the kind of corr seen states the vars are not linearly correlated", "kind of corr seen states the vars are not linearly correlated ''' print", "train_data.info() ''' 15 cols : A to O for training Many vars have", "4 b 22.67 2.540 y p c h 2.585 t f 0 f", "with string ( seems like binary are there in them) ''' #print train_data.describe()", "le=LabelEncoder() train_data.D = le.fit_transform(train_data.D) print le.classes_ #print train_data.corr() #print train_data.E.value_counts() #print train_data.corr() le=LabelEncoder()", "f g 0 0 0 4 5 a 36.00 1.000 u g c", "''' 15 cols : A to O for training Many vars have null", "seems like binary are there in them) ''' #print train_data.describe() #print train_data.head(5) '''", 
"H I J K L M N O P 0 1 b 18.42", "0 456 0 ''' #print train_data.corr() ''' the kind of corr seen states", "null : C , H , I , J , K , L", "from sklearn.preprocessing import LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_ #print train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder()", "print train_data.G.value_counts() # print train_data.I.value_counts() # print train_data.J.value_counts() # print train_data.L.value_counts() # print", "test_data.F.value_counts() # print train_data.G.value_counts() # print train_data.I.value_counts() # print train_data.J.value_counts() # print train_data.L.value_counts()", "21.75 11.750 u g c v 0.250 f f 0 t g 180", "v 0.040 f f 0 f g 170 179 1 3 4 b", "3 b 30.17 1.085 y p c v 0.040 f f 0 f", "them) ''' #print train_data.describe() #print train_data.head(5) ''' id A B C D E", "or int 9 are with string ( seems like binary are there in", "#print train_data.info() ''' 15 cols : A to O for training Many vars", "train_data.head(5) ''' id A B C D E F G H I J", "of corr seen states the vars are not linearly correlated ''' print train_data[(train_data.A!='a')", "J K L M N O P 0 1 b 18.42 10.415 y", ", J , K , L , M , O , P with", "not linearly correlated ''' print train_data[(train_data.A!='a') & (train_data.A!='b')] from sklearn.preprocessing import LabelEncoder le=LabelEncoder()", "le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_ #print train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder() train_data.D = le.fit_transform(train_data.D) print", "print test_data.F.value_counts() # print train_data.G.value_counts() # print train_data.I.value_counts() # print train_data.J.value_counts() # print", "K , L , M , O , P with null : A", ", N 6 cols with float or int 9 are with string (", "g c v 2.000 t t 11 f g 0 456 0 '''", "H , I , J , K , L , M , O", ", L , M , O , P with null : A ,", "plt train_data = 
pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print train_data.info() '''", "seen states the vars are not linearly correlated ''' print train_data[(train_data.A!='a') & (train_data.A!='b')]", "0 0 4 5 a 36.00 1.000 u g c v 2.000 t", "''' #print train_data.corr() ''' the kind of corr seen states the vars are", "b 18.42 10.415 y p aa v 0.125 t f 0 f g", "1 3 4 b 22.67 2.540 y p c h 2.585 t f", "as plt train_data = pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print train_data.info()", "f 0 t g 180 0 1 2 3 b 30.17 1.085 y", ": A to O for training Many vars have null value Non null", "LabelEncoder le=LabelEncoder() train_data.A=le.fit_transform(train_data.A) print le.classes_ #print train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder() train_data.D = le.fit_transform(train_data.D)", "2 3 b 30.17 1.085 y p c v 0.040 f f 0", "g c v 0.250 f f 0 t g 180 0 1 2", "y p c v 0.040 f f 0 f g 170 179 1", "training Many vars have null value Non null : C , H ,", "train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data) ## pd.read_csv(\"../data/test.csv\") #print train_data.info() ''' 15 cols : A", "c v 0.040 f f 0 f g 170 179 1 3 4", "the kind of corr seen states the vars are not linearly correlated '''", "as pd import matplotlib.pylab as plt train_data = pd.read_csv(\"../data/train.csv\") train_data_len=len(train_data) test_data=pd.read_csv(\"../data/test.csv\") test_data_len=len(test_data) data=pd.concat(train_data,test_data)", "# print train_data.G.value_counts() # print train_data.I.value_counts() # print train_data.J.value_counts() # print 
train_data.L.value_counts() #", "9 are with string ( seems like binary are there in them) '''", "t f 0 f g 120 375 1 1 2 a 21.75 11.750", "22.67 2.540 y p c h 2.585 t f 0 f g 0", "as np import pandas as pd import matplotlib.pylab as plt train_data = pd.read_csv(\"../data/train.csv\")", ", B , D , E , F, G , N 6 cols", "train_data.corr() le=LabelEncoder() train_data.E = le.fit_transform(train_data.E) print train_data.F.value_counts() print test_data.F.value_counts() # print train_data.G.value_counts() #", "#print train_data.D.describe() #print train_data.D.value_counts() le=LabelEncoder() train_data.D = le.fit_transform(train_data.D) print le.classes_ #print train_data.corr() #print", "print train_data.F.value_counts() print test_data.F.value_counts() # print train_data.G.value_counts() # print train_data.I.value_counts() # print train_data.J.value_counts()", "null value Non null : C , H , I , J ,", "P 0 1 b 18.42 10.415 y p aa v 0.125 t f", "L M N O P 0 1 b 18.42 10.415 y p aa", "#print train_data.corr() le=LabelEncoder() train_data.E = le.fit_transform(train_data.E) print train_data.F.value_counts() print test_data.F.value_counts() # print train_data.G.value_counts()", "C D E F G H I J K L M N O", "le.classes_ #print train_data.corr() #print train_data.E.value_counts() #print train_data.corr() le=LabelEncoder() train_data.E = le.fit_transform(train_data.E) print train_data.F.value_counts()", "p c v 0.040 f f 0 f g 170 179 1 3", "print le.classes_ #print train_data.corr() #print train_data.E.value_counts() #print train_data.corr() le=LabelEncoder() train_data.E = le.fit_transform(train_data.E) print", "0 f g 170 179 1 3 4 b 22.67 2.540 y p", "b 22.67 2.540 y p c h 2.585 t f 0 f g", "train_data.F.value_counts() print test_data.F.value_counts() # print train_data.G.value_counts() # print train_data.I.value_counts() # print train_data.J.value_counts() #", "to O for training Many vars have null value Non null : C", "1.000 u g c v 2.000 t t 11 f g 0 456", "1 2 a 21.75 
11.750 u g c v 0.250 f f 0", "1 2 3 b 30.17 1.085 y p c v 0.040 f f" ]
[ "with click.progressbar(keys) as bar: for key in bar: c = 0 for token", "import Pool from shelve import DbfilenameShelf from tokenizer import RegexpTokenizer class AbstractDB(DbfilenameShelf): def", "import defaultdict from contextlib import closing from functools import partial from multiprocessing.pool import", "out_file, pool_size): with closing(AbstractDB(out_file, protocol=-1)) as db: target_files = [f for f in", "0 for token in tokenizer.tokenize(self[key]['text']): if token.text.lower() in words: c += 1 word_count", "tokenizer import RegexpTokenizer class AbstractDB(DbfilenameShelf): def __init__(self, *args, **kwargs): DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod", "= title.decode('utf-8').replace(u'_', u' ') if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o elif p", "if token.text.lower() in words: c += 1 word_count += min(c, max_text_len) return word_count", "db: target_files = [f for f in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with closing(Pool(pool_size)) as", "**kwargs) @staticmethod def build(in_dir, out_file, pool_size): with closing(AbstractDB(out_file, protocol=-1)) as db: target_files =", "re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' % file_name) g = rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir,", "import closing from functools import partial from multiprocessing.pool import Pool from shelve import", "shelve import DbfilenameShelf from tokenizer import RegexpTokenizer class AbstractDB(DbfilenameShelf): def __init__(self, *args, **kwargs):", "p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] = (o, span) elif p", "link_title) in mention_titles[title].items(): (name, span) = mentions[title][key] links.append((name, 
link_title, span)) ret.append((title.encode('utf-8'), dict(title=title, text=text,", "if match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s] = link_title", "with closing(Pool(pool_size)) as pool: f = partial(_process_file, in_dir=in_dir) for ret in pool.imap(f, target_files):", "__init__(self, *args, **kwargs): DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod def build(in_dir, out_file, pool_size): with closing(AbstractDB(out_file,", "click import gzip import os import rdflib import re import urllib from collections", "= frozenset(list(vocab.words())) word_count = 0 with click.progressbar(keys) as bar: for key in bar:", "contextlib import closing from functools import partial from multiprocessing.pool import Pool from shelve", "pool_size): with closing(AbstractDB(out_file, protocol=-1)) as db: target_files = [f for f in sorted(os.listdir(in_dir))", "= unicode(p) o = unicode(o) abs_match_obj = abs_matcher.match(s) title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title =", "click.progressbar(keys) as bar: for key in bar: c = 0 for token in", "word_count = 0 with click.progressbar(keys) as bar: for key in bar: c =", "= link_title ret = [] for (title, text) in texts.iteritems(): links = []", "title.decode('utf-8').replace(u'_', u' ') if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o elif p ==", "u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o) if match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_', u'", "# -*- coding: utf-8 -*- import click import gzip import os import rdflib", "re import urllib from collections import defaultdict from contextlib import closing from functools", "-*- import click import gzip import os import rdflib import re import urllib", "c = 0 
for token in tokenizer.tokenize(self[key]['text']): if token.text.lower() in words: c +=", "format='turtle') texts = {} mentions = defaultdict(dict) mention_titles = defaultdict(dict) for (s, p,", "gzip import os import rdflib import re import urllib from collections import defaultdict", "tokenizer = RegexpTokenizer() keys = self.keys() words = frozenset(list(vocab.words())) word_count = 0 with", "multiprocessing.pool import Pool from shelve import DbfilenameShelf from tokenizer import RegexpTokenizer class AbstractDB(DbfilenameShelf):", "from shelve import DbfilenameShelf from tokenizer import RegexpTokenizer class AbstractDB(DbfilenameShelf): def __init__(self, *args,", "click.echo('Processing %s' % file_name) g = rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir, file_name)) as f: g.load(f,", "elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] = (o, span) elif", "= [] for (title, text) in texts.iteritems(): links = [] for (key, link_title)", "@staticmethod def build(in_dir, out_file, pool_size): with closing(AbstractDB(out_file, protocol=-1)) as db: target_files = [f", "as pool: f = partial(_process_file, in_dir=in_dir) for ret in pool.imap(f, target_files): for (key,", "defaultdict from contextlib import closing from functools import partial from multiprocessing.pool import Pool", "collections import defaultdict from contextlib import closing from functools import partial from multiprocessing.pool", "words = frozenset(list(vocab.words())) word_count = 0 with click.progressbar(keys) as bar: for key in", "ret = [] for (title, text) in texts.iteritems(): links = [] for (key,", "max_text_len): tokenizer = RegexpTokenizer() keys = self.keys() words = frozenset(list(vocab.words())) word_count = 0", "closing(Pool(pool_size)) as pool: f = partial(_process_file, in_dir=in_dir) for ret in pool.imap(f, target_files): for", "for ret in pool.imap(f, 
target_files): for (key, obj) in ret: db[key] = obj", "RegexpTokenizer class AbstractDB(DbfilenameShelf): def __init__(self, *args, **kwargs): DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod def build(in_dir,", "import urllib from collections import defaultdict from contextlib import closing from functools import", "in ret: db[key] = obj def count_valid_words(self, vocab, max_text_len): tokenizer = RegexpTokenizer() keys", "ret in pool.imap(f, target_files): for (key, obj) in ret: db[key] = obj def", "= {} mentions = defaultdict(dict) mention_titles = defaultdict(dict) for (s, p, o) in", "o = unicode(o) abs_match_obj = abs_matcher.match(s) title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_', u'", "link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s] = link_title ret =", "u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s]", "DbfilenameShelf from tokenizer import RegexpTokenizer class AbstractDB(DbfilenameShelf): def __init__(self, *args, **kwargs): DbfilenameShelf.__init__(self, *args,", "= RegexpTokenizer() keys = self.keys() words = frozenset(list(vocab.words())) word_count = 0 with click.progressbar(keys)", "mention_titles = defaultdict(dict) for (s, p, o) in g: s = unicode(s) p", "o elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] = (o, span)", "mention_titles[title][s] = link_title ret = [] for (title, text) in texts.iteritems(): links =", "(name, span) = mentions[title][key] links.append((name, link_title, span)) ret.append((title.encode('utf-8'), 
dict(title=title, text=text, links=links))) return ret", "obj def count_valid_words(self, vocab, max_text_len): tokenizer = RegexpTokenizer() keys = self.keys() words =", "(title, text) in texts.iteritems(): links = [] for (key, link_title) in mention_titles[title].items(): (name,", "for (s, p, o) in g: s = unicode(s) p = unicode(p) o", "in g: s = unicode(s) p = unicode(p) o = unicode(o) abs_match_obj =", "= abs_matcher.match(s) title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_', u' ') if p ==", "1 word_count += min(c, max_text_len) return word_count def _process_file(file_name, in_dir): abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$')", "match_obj = dbp_matcher.match(o) if match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_', u' ')", "def _process_file(file_name, in_dir): abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' % file_name)", "in mention_titles[title].items(): (name, span) = mentions[title][key] links.append((name, link_title, span)) ret.append((title.encode('utf-8'), dict(title=title, text=text, links=links)))", "vocab, max_text_len): tokenizer = RegexpTokenizer() keys = self.keys() words = frozenset(list(vocab.words())) word_count =", "= obj def count_valid_words(self, vocab, max_text_len): tokenizer = RegexpTokenizer() keys = self.keys() words", "partial(_process_file, in_dir=in_dir) for ret in pool.imap(f, target_files): for (key, obj) in ret: db[key]", "bar: c = 0 for token in tokenizer.tokenize(self[key]['text']): if token.text.lower() in words: c", "for token in tokenizer.tokenize(self[key]['text']): if token.text.lower() in words: c += 1 word_count +=", "o) in g: s = unicode(s) p = unicode(p) o = unicode(o) 
abs_match_obj", "in_dir=in_dir) for ret in pool.imap(f, target_files): for (key, obj) in ret: db[key] =", "partial from multiprocessing.pool import Pool from shelve import DbfilenameShelf from tokenizer import RegexpTokenizer", "min(c, max_text_len) return word_count def _process_file(file_name, in_dir): abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$')", "= dbp_matcher.match(o) if match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s]", "for f in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with closing(Pool(pool_size)) as pool: f = partial(_process_file,", "db[key] = obj def count_valid_words(self, vocab, max_text_len): tokenizer = RegexpTokenizer() keys = self.keys()", "= defaultdict(dict) mention_titles = defaultdict(dict) for (s, p, o) in g: s =", "= urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s] = link_title ret = []", "in texts.iteritems(): links = [] for (key, link_title) in mention_titles[title].items(): (name, span) =", "DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod def build(in_dir, out_file, pool_size): with closing(AbstractDB(out_file, protocol=-1)) as db:", "abs_match_obj = abs_matcher.match(s) title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_', u' ') if p", "(key, obj) in ret: db[key] = obj def count_valid_words(self, vocab, max_text_len): tokenizer =", "frozenset(list(vocab.words())) word_count = 0 with click.progressbar(keys) as bar: for key in bar: c", "abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' % file_name) 
g = rdflib.Graph()", "pool: f = partial(_process_file, in_dir=in_dir) for ret in pool.imap(f, target_files): for (key, obj)", "match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s] = link_title ret", "f.endswith('ttl.gz')] with closing(Pool(pool_size)) as pool: f = partial(_process_file, in_dir=in_dir) for ret in pool.imap(f,", "import gzip import os import rdflib import re import urllib from collections import", "') if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span", "f = partial(_process_file, in_dir=in_dir) for ret in pool.imap(f, target_files): for (key, obj) in", "tokenizer.tokenize(self[key]['text']): if token.text.lower() in words: c += 1 word_count += min(c, max_text_len) return", "+= 1 word_count += min(c, max_text_len) return word_count def _process_file(file_name, in_dir): abs_matcher =", "protocol=-1)) as db: target_files = [f for f in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with", "functools import partial from multiprocessing.pool import Pool from shelve import DbfilenameShelf from tokenizer", "defaultdict(dict) mention_titles = defaultdict(dict) for (s, p, o) in g: s = unicode(s)", "AbstractDB(DbfilenameShelf): def __init__(self, *args, **kwargs): DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod def build(in_dir, out_file, pool_size):", "title = title.decode('utf-8').replace(u'_', u' ') if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o elif", "= defaultdict(dict) for (s, p, o) in g: s = unicode(s) p =", "with closing(AbstractDB(out_file, protocol=-1)) as db: target_files = [f for f in sorted(os.listdir(in_dir)) if", "sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with closing(Pool(pool_size)) as pool: f = partial(_process_file, 
in_dir=in_dir) for ret", "elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o) if match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title", "*args, **kwargs) @staticmethod def build(in_dir, out_file, pool_size): with closing(AbstractDB(out_file, protocol=-1)) as db: target_files", "dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' % file_name) g = rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir, file_name))", "mentions = defaultdict(dict) mention_titles = defaultdict(dict) for (s, p, o) in g: s", "import re import urllib from collections import defaultdict from contextlib import closing from", "Pool from shelve import DbfilenameShelf from tokenizer import RegexpTokenizer class AbstractDB(DbfilenameShelf): def __init__(self,", "if f.endswith('ttl.gz')] with closing(Pool(pool_size)) as pool: f = partial(_process_file, in_dir=in_dir) for ret in", "link_title = link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s] = link_title ret = [] for (title,", "u' ') mention_titles[title][s] = link_title ret = [] for (title, text) in texts.iteritems():", "for (title, text) in texts.iteritems(): links = [] for (key, link_title) in mention_titles[title].items():", "return word_count def _process_file(file_name, in_dir): abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s'", "{} mentions = defaultdict(dict) mention_titles = defaultdict(dict) for (s, p, o) in g:", "in_dir): abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' % file_name) g =", "re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' % file_name) g = rdflib.Graph() with 
gzip.GzipFile(os.path.join(in_dir, file_name)) as f:", "span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] = (o, span) elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj", "g = rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir, file_name)) as f: g.load(f, format='turtle') texts = {}", "p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)),", "from collections import defaultdict from contextlib import closing from functools import partial from", "token.text.lower() in words: c += 1 word_count += min(c, max_text_len) return word_count def", "p, o) in g: s = unicode(s) p = unicode(p) o = unicode(o)", "= re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' % file_name) g = rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir, file_name)) as", "word_count += min(c, max_text_len) return word_count def _process_file(file_name, in_dir): abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher", "texts[title] = o elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] =", "from contextlib import closing from functools import partial from multiprocessing.pool import Pool from", "%s' % file_name) g = rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir, file_name)) as f: g.load(f, format='turtle')", "ret: db[key] = obj def count_valid_words(self, vocab, max_text_len): tokenizer = RegexpTokenizer() keys =", "urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_', u' ') if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o", "in pool.imap(f, target_files): for 
(key, obj) in ret: db[key] = obj def count_valid_words(self,", "u' ') if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf':", "build(in_dir, out_file, pool_size): with closing(AbstractDB(out_file, protocol=-1)) as db: target_files = [f for f", "(int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] = (o, span) elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o)", "mentions[title][s] = (o, span) elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o) if match_obj:", "unicode(p) o = unicode(o) abs_match_obj = abs_matcher.match(s) title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_',", "file_name) g = rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir, file_name)) as f: g.load(f, format='turtle') texts =", "as f: g.load(f, format='turtle') texts = {} mentions = defaultdict(dict) mention_titles = defaultdict(dict)", "g.load(f, format='turtle') texts = {} mentions = defaultdict(dict) mention_titles = defaultdict(dict) for (s,", "closing from functools import partial from multiprocessing.pool import Pool from shelve import DbfilenameShelf", "in tokenizer.tokenize(self[key]['text']): if token.text.lower() in words: c += 1 word_count += min(c, max_text_len)", "c += 1 word_count += min(c, max_text_len) return word_count def _process_file(file_name, in_dir): abs_matcher", "unicode(o) abs_match_obj = abs_matcher.match(s) title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_', u' ') if", "link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s] = link_title ret = [] for (title, text) in", "max_text_len) return word_count def _process_file(file_name, in_dir): abs_matcher = 
re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing", "import click import gzip import os import rdflib import re import urllib from", "import RegexpTokenizer class AbstractDB(DbfilenameShelf): def __init__(self, *args, **kwargs): DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod def", "token in tokenizer.tokenize(self[key]['text']): if token.text.lower() in words: c += 1 word_count += min(c,", "') mention_titles[title][s] = link_title ret = [] for (title, text) in texts.iteritems(): links", "= [f for f in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with closing(Pool(pool_size)) as pool: f", "import DbfilenameShelf from tokenizer import RegexpTokenizer class AbstractDB(DbfilenameShelf): def __init__(self, *args, **kwargs): DbfilenameShelf.__init__(self,", "links = [] for (key, link_title) in mention_titles[title].items(): (name, span) = mentions[title][key] links.append((name,", "coding: utf-8 -*- import click import gzip import os import rdflib import re", "[f for f in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with closing(Pool(pool_size)) as pool: f =", "def count_valid_words(self, vocab, max_text_len): tokenizer = RegexpTokenizer() keys = self.keys() words = frozenset(list(vocab.words()))", "text) in texts.iteritems(): links = [] for (key, link_title) in mention_titles[title].items(): (name, span)", "import os import rdflib import re import urllib from collections import defaultdict from", "p = unicode(p) o = unicode(o) abs_match_obj = abs_matcher.match(s) title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title", "title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_', u' ') if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title]", "abs_matcher.match(s) title = 
urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_', u' ') if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString':", "= o elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] = (o,", "g: s = unicode(s) p = unicode(p) o = unicode(o) abs_match_obj = abs_matcher.match(s)", "in bar: c = 0 for token in tokenizer.tokenize(self[key]['text']): if token.text.lower() in words:", "dbp_matcher.match(o) if match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s] =", "link_title ret = [] for (title, text) in texts.iteritems(): links = [] for", "mention_titles[title].items(): (name, span) = mentions[title][key] links.append((name, link_title, span)) ret.append((title.encode('utf-8'), dict(title=title, text=text, links=links))) return", "gzip.GzipFile(os.path.join(in_dir, file_name)) as f: g.load(f, format='turtle') texts = {} mentions = defaultdict(dict) mention_titles", "key in bar: c = 0 for token in tokenizer.tokenize(self[key]['text']): if token.text.lower() in", "import rdflib import re import urllib from collections import defaultdict from contextlib import", "urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s] = link_title ret = [] for", "texts = {} mentions = defaultdict(dict) mention_titles = defaultdict(dict) for (s, p, o)", "int(abs_match_obj.group(3))) mentions[title][s] = (o, span) elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o) if", "= (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] = (o, span) elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj =", "texts.iteritems(): links = [] for (key, 
link_title) in mention_titles[title].items(): (name, span) = mentions[title][key]", "rdflib import re import urllib from collections import defaultdict from contextlib import closing", "for (key, link_title) in mention_titles[title].items(): (name, span) = mentions[title][key] links.append((name, link_title, span)) ret.append((title.encode('utf-8'),", "from multiprocessing.pool import Pool from shelve import DbfilenameShelf from tokenizer import RegexpTokenizer class", "closing(AbstractDB(out_file, protocol=-1)) as db: target_files = [f for f in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')]", "obj) in ret: db[key] = obj def count_valid_words(self, vocab, max_text_len): tokenizer = RegexpTokenizer()", "self.keys() words = frozenset(list(vocab.words())) word_count = 0 with click.progressbar(keys) as bar: for key", "target_files): for (key, obj) in ret: db[key] = obj def count_valid_words(self, vocab, max_text_len):", "p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o) if match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title =", "= re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' % file_name) g = rdflib.Graph() with", "= link_title.decode('utf-8').replace(u'_', u' ') mention_titles[title][s] = link_title ret = [] for (title, text)", "= [] for (key, link_title) in mention_titles[title].items(): (name, span) = mentions[title][key] links.append((name, link_title,", "with gzip.GzipFile(os.path.join(in_dir, file_name)) as f: g.load(f, format='turtle') texts = {} mentions = defaultdict(dict)", "u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] = (o, span) elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef':", "(key, link_title) in mention_titles[title].items(): 
(name, span) = mentions[title][key] links.append((name, link_title, span)) ret.append((title.encode('utf-8'), dict(title=title,", "== u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o) if match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8')) link_title = link_title.decode('utf-8').replace(u'_',", "keys = self.keys() words = frozenset(list(vocab.words())) word_count = 0 with click.progressbar(keys) as bar:", "utf-8 -*- import click import gzip import os import rdflib import re import", "as bar: for key in bar: c = 0 for token in tokenizer.tokenize(self[key]['text']):", "= rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir, file_name)) as f: g.load(f, format='turtle') texts = {} mentions", "= partial(_process_file, in_dir=in_dir) for ret in pool.imap(f, target_files): for (key, obj) in ret:", "word_count def _process_file(file_name, in_dir): abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' %", "+= min(c, max_text_len) return word_count def _process_file(file_name, in_dir): abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher =", "(o, span) elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o) if match_obj: link_title =", "**kwargs): DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod def build(in_dir, out_file, pool_size): with closing(AbstractDB(out_file, protocol=-1)) as", "unicode(s) p = unicode(p) o = unicode(o) abs_match_obj = abs_matcher.match(s) title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8')))", "= unicode(s) p = unicode(p) o = unicode(o) abs_match_obj = abs_matcher.match(s) title =", "f: g.load(f, format='turtle') texts = {} mentions = defaultdict(dict) mention_titles = defaultdict(dict) for", "[] for (title, text) in 
texts.iteritems(): links = [] for (key, link_title) in", "in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with closing(Pool(pool_size)) as pool: f = partial(_process_file, in_dir=in_dir) for", "span) elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o) if match_obj: link_title = urllib.unquote(match_obj.group(1).encode('utf-8'))", "for (key, obj) in ret: db[key] = obj def count_valid_words(self, vocab, max_text_len): tokenizer", "pool.imap(f, target_files): for (key, obj) in ret: db[key] = obj def count_valid_words(self, vocab,", "== u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3))) mentions[title][s] = (o, span) elif p ==", "words: c += 1 word_count += min(c, max_text_len) return word_count def _process_file(file_name, in_dir):", "from functools import partial from multiprocessing.pool import Pool from shelve import DbfilenameShelf from", "file_name)) as f: g.load(f, format='turtle') texts = {} mentions = defaultdict(dict) mention_titles =", "def build(in_dir, out_file, pool_size): with closing(AbstractDB(out_file, protocol=-1)) as db: target_files = [f for", "bar: for key in bar: c = 0 for token in tokenizer.tokenize(self[key]['text']): if", "count_valid_words(self, vocab, max_text_len): tokenizer = RegexpTokenizer() keys = self.keys() words = frozenset(list(vocab.words())) word_count", "for key in bar: c = 0 for token in tokenizer.tokenize(self[key]['text']): if token.text.lower()", "class AbstractDB(DbfilenameShelf): def __init__(self, *args, **kwargs): DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod def build(in_dir, out_file,", "_process_file(file_name, in_dir): abs_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)/abstract#offset_(\\d+)_(\\d+)$') dbp_matcher = re.compile(ur'^http://dbpedia\\.org/resource/(.*)$') click.echo('Processing %s' % file_name) g", "[] for (key, link_title) in 
mention_titles[title].items(): (name, span) = mentions[title][key] links.append((name, link_title, span))", "= urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_', u' ') if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] =", "= 0 for token in tokenizer.tokenize(self[key]['text']): if token.text.lower() in words: c += 1", "urllib from collections import defaultdict from contextlib import closing from functools import partial", "def __init__(self, *args, **kwargs): DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod def build(in_dir, out_file, pool_size): with", "f in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with closing(Pool(pool_size)) as pool: f = partial(_process_file, in_dir=in_dir)", "in words: c += 1 word_count += min(c, max_text_len) return word_count def _process_file(file_name,", "= self.keys() words = frozenset(list(vocab.words())) word_count = 0 with click.progressbar(keys) as bar: for", "as db: target_files = [f for f in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with closing(Pool(pool_size))", "= 0 with click.progressbar(keys) as bar: for key in bar: c = 0", "os import rdflib import re import urllib from collections import defaultdict from contextlib", "rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir, file_name)) as f: g.load(f, format='turtle') texts = {} mentions =", "defaultdict(dict) for (s, p, o) in g: s = unicode(s) p = unicode(p)", "s = unicode(s) p = unicode(p) o = unicode(o) abs_match_obj = abs_matcher.match(s) title", "= (o, span) elif p == u'http://www.w3.org/2005/11/its/rdf#taIdentRef': match_obj = dbp_matcher.match(o) if match_obj: link_title", "target_files = [f for f in sorted(os.listdir(in_dir)) if f.endswith('ttl.gz')] with closing(Pool(pool_size)) as pool:", "(s, p, o) in g: s = unicode(s) p = unicode(p) o =", "% file_name) g = rdflib.Graph() with gzip.GzipFile(os.path.join(in_dir, file_name)) 
as f: g.load(f, format='turtle') texts", "-*- coding: utf-8 -*- import click import gzip import os import rdflib import", "import partial from multiprocessing.pool import Pool from shelve import DbfilenameShelf from tokenizer import", "from tokenizer import RegexpTokenizer class AbstractDB(DbfilenameShelf): def __init__(self, *args, **kwargs): DbfilenameShelf.__init__(self, *args, **kwargs)", "*args, **kwargs): DbfilenameShelf.__init__(self, *args, **kwargs) @staticmethod def build(in_dir, out_file, pool_size): with closing(AbstractDB(out_file, protocol=-1))", "0 with click.progressbar(keys) as bar: for key in bar: c = 0 for", "= unicode(o) abs_match_obj = abs_matcher.match(s) title = urllib.unquote(urllib.unquote(abs_match_obj.group(1).encode('utf-8'))) title = title.decode('utf-8').replace(u'_', u' ')", "if p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span =", "== u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#isString': texts[title] = o elif p == u'http://persistence.uni-leipzig.org/nlp2rdf/ontologies/nif-core#anchorOf': span = (int(abs_match_obj.group(2)), int(abs_match_obj.group(3)))", "RegexpTokenizer() keys = self.keys() words = frozenset(list(vocab.words())) word_count = 0 with click.progressbar(keys) as" ]
[ "import render_template def index(): return render_template('index.html') def documentation(): return render_template('documentation.html') def api_landing(): return", "render_template def index(): return render_template('index.html') def documentation(): return render_template('documentation.html') def api_landing(): return render_template('api_landing.html')", "from flask import render_template def index(): return render_template('index.html') def documentation(): return render_template('documentation.html') def", "flask import render_template def index(): return render_template('index.html') def documentation(): return render_template('documentation.html') def api_landing():" ]
[ "and profile pic path to users table Revision ID: 0<PASSWORD> Revises: <PASSWORD> Create", "please adjust! ### op.drop_column('users', 'profile_pic_path') op.drop_column('users', 'biodata') # ### end Alembic commands ###", "op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True)) # ### end Alembic commands", "sa # revision identifiers, used by Alembic. revision = '<PASSWORD>' down_revision = '<PASSWORD>'", "by Alembic. revision = '<PASSWORD>' down_revision = '<PASSWORD>' branch_labels = None depends_on =", "please adjust! ### op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True)) # ###", "alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic.", "sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True)) # ### end Alembic commands ### def", "### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'profile_pic_path') op.drop_column('users',", "op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True)) # ### end Alembic commands ### def downgrade(): #", "= '<PASSWORD>' down_revision = '<PASSWORD>' branch_labels = None depends_on = None def upgrade():", "= '<PASSWORD>' branch_labels = None depends_on = None def upgrade(): # ### commands", "Date: 2021-09-19 11:46:47.937931 \"\"\" from alembic import op import sqlalchemy as sa #", "op import sqlalchemy as sa # revision identifiers, used by Alembic. revision =", "auto generated by Alembic - please adjust! ### op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users',", "def downgrade(): # ### commands auto generated by Alembic - please adjust! ###", "# revision identifiers, used by Alembic. 
revision = '<PASSWORD>' down_revision = '<PASSWORD>' branch_labels", "Alembic - please adjust! ### op.drop_column('users', 'profile_pic_path') op.drop_column('users', 'biodata') # ### end Alembic", "# ### commands auto generated by Alembic - please adjust! ### op.drop_column('users', 'profile_pic_path')", "sa.String(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands", "import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision", "### def downgrade(): # ### commands auto generated by Alembic - please adjust!", "Alembic. revision = '<PASSWORD>' down_revision = '<PASSWORD>' branch_labels = None depends_on = None", "None depends_on = None def upgrade(): # ### commands auto generated by Alembic", "<reponame>WaruiAlfred/sixty-seconds-impression \"\"\"add a bio and profile pic path to users table Revision ID:", "table Revision ID: 0<PASSWORD> Revises: <PASSWORD> Create Date: 2021-09-19 11:46:47.937931 \"\"\" from alembic", "'<PASSWORD>' branch_labels = None depends_on = None def upgrade(): # ### commands auto", "'<PASSWORD>' down_revision = '<PASSWORD>' branch_labels = None depends_on = None def upgrade(): #", "generated by Alembic - please adjust! ### op.drop_column('users', 'profile_pic_path') op.drop_column('users', 'biodata') # ###", "Alembic commands ### def downgrade(): # ### commands auto generated by Alembic -", "ID: 0<PASSWORD> Revises: <PASSWORD> Create Date: 2021-09-19 11:46:47.937931 \"\"\" from alembic import op", "branch_labels = None depends_on = None def upgrade(): # ### commands auto generated", "\"\"\" from alembic import op import sqlalchemy as sa # revision identifiers, used", "downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('users',", "end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic", "Alembic - please adjust! 
### op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True))", "auto generated by Alembic - please adjust! ### op.drop_column('users', 'profile_pic_path') op.drop_column('users', 'biodata') #", "sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True)) # ### end Alembic commands ###", "def upgrade(): # ### commands auto generated by Alembic - please adjust! ###", "revision = '<PASSWORD>' down_revision = '<PASSWORD>' branch_labels = None depends_on = None def", "### commands auto generated by Alembic - please adjust! ### op.add_column('users', sa.Column('biodata', sa.String(length=255),", "commands ### def downgrade(): # ### commands auto generated by Alembic - please", "\"\"\"add a bio and profile pic path to users table Revision ID: 0<PASSWORD>", "11:46:47.937931 \"\"\" from alembic import op import sqlalchemy as sa # revision identifiers,", "down_revision = '<PASSWORD>' branch_labels = None depends_on = None def upgrade(): # ###", "sa.Column('profile_pic_path', sa.String(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ###", "= None depends_on = None def upgrade(): # ### commands auto generated by", "Revision ID: 0<PASSWORD> Revises: <PASSWORD> Create Date: 2021-09-19 11:46:47.937931 \"\"\" from alembic import", "0<PASSWORD> Revises: <PASSWORD> Create Date: 2021-09-19 11:46:47.937931 \"\"\" from alembic import op import", "commands auto generated by Alembic - please adjust! 
### op.drop_column('users', 'profile_pic_path') op.drop_column('users', 'biodata')", "users table Revision ID: 0<PASSWORD> Revises: <PASSWORD> Create Date: 2021-09-19 11:46:47.937931 \"\"\" from", "### op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True)) # ### end Alembic", "import sqlalchemy as sa # revision identifiers, used by Alembic. revision = '<PASSWORD>'", "= None def upgrade(): # ### commands auto generated by Alembic - please", "by Alembic - please adjust! ### op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(),", "sqlalchemy as sa # revision identifiers, used by Alembic. revision = '<PASSWORD>' down_revision", "Revises: <PASSWORD> Create Date: 2021-09-19 11:46:47.937931 \"\"\" from alembic import op import sqlalchemy", "to users table Revision ID: 0<PASSWORD> Revises: <PASSWORD> Create Date: 2021-09-19 11:46:47.937931 \"\"\"", "None def upgrade(): # ### commands auto generated by Alembic - please adjust!", "depends_on = None def upgrade(): # ### commands auto generated by Alembic -", "adjust! ### op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True)) # ### end", "bio and profile pic path to users table Revision ID: 0<PASSWORD> Revises: <PASSWORD>", "- please adjust! ### op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True)) #", "pic path to users table Revision ID: 0<PASSWORD> Revises: <PASSWORD> Create Date: 2021-09-19", "used by Alembic. 
revision = '<PASSWORD>' down_revision = '<PASSWORD>' branch_labels = None depends_on", "path to users table Revision ID: 0<PASSWORD> Revises: <PASSWORD> Create Date: 2021-09-19 11:46:47.937931", "# ### end Alembic commands ### def downgrade(): # ### commands auto generated", "Create Date: 2021-09-19 11:46:47.937931 \"\"\" from alembic import op import sqlalchemy as sa", "# ### commands auto generated by Alembic - please adjust! ### op.add_column('users', sa.Column('biodata',", "revision identifiers, used by Alembic. revision = '<PASSWORD>' down_revision = '<PASSWORD>' branch_labels =", "profile pic path to users table Revision ID: 0<PASSWORD> Revises: <PASSWORD> Create Date:", "2021-09-19 11:46:47.937931 \"\"\" from alembic import op import sqlalchemy as sa # revision", "from alembic import op import sqlalchemy as sa # revision identifiers, used by", "upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('users',", "nullable=True)) op.add_column('users', sa.Column('profile_pic_path', sa.String(), nullable=True)) # ### end Alembic commands ### def downgrade():", "a bio and profile pic path to users table Revision ID: 0<PASSWORD> Revises:", "identifiers, used by Alembic. revision = '<PASSWORD>' down_revision = '<PASSWORD>' branch_labels = None", "### end Alembic commands ### def downgrade(): # ### commands auto generated by", "- please adjust! ### op.drop_column('users', 'profile_pic_path') op.drop_column('users', 'biodata') # ### end Alembic commands", "<PASSWORD> Create Date: 2021-09-19 11:46:47.937931 \"\"\" from alembic import op import sqlalchemy as", "by Alembic - please adjust! ### op.drop_column('users', 'profile_pic_path') op.drop_column('users', 'biodata') # ### end", "nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto", "commands auto generated by Alembic - please adjust! 
### op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True))", "generated by Alembic - please adjust! ### op.add_column('users', sa.Column('biodata', sa.String(length=255), nullable=True)) op.add_column('users', sa.Column('profile_pic_path',", "as sa # revision identifiers, used by Alembic. revision = '<PASSWORD>' down_revision =" ]
[ "setuptools import setup setup( name=\"psycopg2_error_handler\", install_requires=[ \"psycopg-binary >= 3.0\", ], packages=[\"psycopg2_error\"], version='0.0.4', description='Psycopg2", "name=\"psycopg2_error_handler\", install_requires=[ \"psycopg-binary >= 3.0\", ], packages=[\"psycopg2_error\"], version='0.0.4', description='Psycopg2 Error Handler', author='<NAME>', license='MIT',", "from setuptools import setup setup( name=\"psycopg2_error_handler\", install_requires=[ \"psycopg-binary >= 3.0\", ], packages=[\"psycopg2_error\"], version='0.0.4',", "setup( name=\"psycopg2_error_handler\", install_requires=[ \"psycopg-binary >= 3.0\", ], packages=[\"psycopg2_error\"], version='0.0.4', description='Psycopg2 Error Handler', author='<NAME>',", "import setup setup( name=\"psycopg2_error_handler\", install_requires=[ \"psycopg-binary >= 3.0\", ], packages=[\"psycopg2_error\"], version='0.0.4', description='Psycopg2 Error", "install_requires=[ \"psycopg-binary >= 3.0\", ], packages=[\"psycopg2_error\"], version='0.0.4', description='Psycopg2 Error Handler', author='<NAME>', license='MIT', )", "setup setup( name=\"psycopg2_error_handler\", install_requires=[ \"psycopg-binary >= 3.0\", ], packages=[\"psycopg2_error\"], version='0.0.4', description='Psycopg2 Error Handler',", "<reponame>lopatinay/psycopg2_error_handler from setuptools import setup setup( name=\"psycopg2_error_handler\", install_requires=[ \"psycopg-binary >= 3.0\", ], packages=[\"psycopg2_error\"]," ]
[ "models from .validators import validate_file_extension # Create your models here. class NormalProject(models.Model): name", "DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document = models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data =", "= models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation) public_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self):", "def is_public(self): return bool(self.public_status) def is_collab(self): return bool(self.collab_status) def get_project_members(self): return self.project_members def", "= models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def get_project_members(self): return self.project_members def __str__(self): return", "= models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation =", "project = models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document", "def get_project_members(self): return self.project_members def get_owner(self): return self.owner def set_status_public(self, status): self.public_status=status def", "unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation) public_status", "models.DateTimeField(auto_now_add=True) def is_public(self): 
return bool(self.public_status) def is_collab(self): return bool(self.collab_status) def get_project_members(self): return self.project_members", "models.ManyToManyField(ParallelRelation) public_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def get_project_members(self):", "def set_status_public(self, status): self.public_status=status def set_status_collab(self, status): self.collab_status=status def __str__(self): return str(self.name) class", "bool(self.public_status) def is_collab(self): return bool(self.collab_status) def get_project_members(self): return self.project_members def get_owner(self): return self.owner", "metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation = models.ForeignKey(ParallelRelation, related_name=\"relation\", on_delete=models.CASCADE) data = models.CharField(max_length=100,", "= models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project = models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE)", "doc_two = models.ManyToManyField(Document, related_name='doc_two') class ParallelProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner =", "return self.project_members def __str__(self): return str(self.name) class ParallelMetadata(models.Model): name = models.CharField(max_length=100, null=False) project", "return bool(self.public_status) def get_project_members(self): return self.project_members def __str__(self): return str(self.name) class ParallelMetadata(models.Model): name", "= models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def get_project_members(self): return self.project_members", "= 
models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0) collab_status = models.BooleanField(default=0)", "ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document, related_name='doc_one') doc_two = models.ManyToManyField(Document, related_name='doc_two') class ParallelProject(models.Model): name =", "related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document = models.ForeignKey(Document, related_name='document',", "is_collab(self): return bool(self.collab_status) def get_project_members(self): return self.project_members def get_owner(self): return self.owner def set_status_public(self,", "blank=True, null=True) class ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document, related_name='doc_one') doc_two = models.ManyToManyField(Document, related_name='doc_two') class", "project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0) collab_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def", "project = models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True) def set_tagged_doc(self,", "__str__(self): return str(self.name) class Document(models.Model): file = models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name =", "def __str__(self): return str(self.name) class Document(models.Model): file = models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name", "import settings from 
django.db import models from .validators import validate_file_extension # Create your", "ParallelProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members =", "= models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document =", "def is_public(self): return bool(self.public_status) def get_project_members(self): return self.project_members def __str__(self): return str(self.name) class", "models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status =", "null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation)", "= models.BooleanField(default=0) collab_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def", "return bool(self.public_status) def is_collab(self): return bool(self.collab_status) def get_project_members(self): return self.project_members def get_owner(self): return", "self.project_members def get_owner(self): return self.owner def set_status_public(self, status): self.public_status=status def set_status_collab(self, status): self.collab_status=status", "models.ManyToManyField(Document, related_name='doc_one') doc_two = models.ManyToManyField(Document, related_name='doc_two') class ParallelProject(models.Model): name = 
models.CharField(max_length=100, null=False, unique=True)", "def set_file(self, file_url): self.file = file_url class NormalMetadata(models.Model): name = models.CharField(max_length=100, null=False, unique=True)", "tagged_doc = models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url): self.tagged_doc = file_url def", "return str(self.name) class Document(models.Model): file = models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name = models.CharField(max_length=100,", "import models from .validators import validate_file_extension # Create your models here. class NormalProject(models.Model):", "# Create your models here. class NormalProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner", "= models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document',", "on_delete=models.CASCADE) document = models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True, null=True) class ParallelRelation(models.Model):", "on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0) collab_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True)", "from django.db import models from .validators import validate_file_extension # Create your models here.", "= models.ManyToManyField(Document, related_name='doc_one') doc_two = models.ManyToManyField(Document, related_name='doc_two') class ParallelProject(models.Model): name = models.CharField(max_length=100, null=False,", "related_name='doc_one') doc_two = models.ManyToManyField(Document, related_name='doc_two') class 
ParallelProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner", "str(self.name) class Document(models.Model): file = models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name = models.CharField(max_length=100, null=False,", ".validators import validate_file_extension # Create your models here. class NormalProject(models.Model): name = models.CharField(max_length=100,", "self.public_status=status def set_status_collab(self, status): self.collab_status=status def __str__(self): return str(self.name) class Document(models.Model): file =", "class ParallelMetadata(models.Model): name = models.CharField(max_length=100, null=False) project = models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta", "file_url class NormalMetadata(models.Model): name = models.CharField(max_length=100, null=False, unique=True) project = models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE)", "NormalMetadata(models.Model): name = models.CharField(max_length=100, null=False, unique=True) project = models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model):", "name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL)", "= models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url): self.tagged_doc = file_url def set_file(self,", "here. 
class NormalProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE)", "file_url): self.tagged_doc = file_url def set_file(self, file_url): self.file = file_url class NormalMetadata(models.Model): name", "def set_tagged_doc(self, file_url): self.tagged_doc = file_url def set_file(self, file_url): self.file = file_url class", "your models here. class NormalProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,", "null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0)", "name = models.CharField(max_length=100, null=False, unique=True) project = models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata", "__str__(self): return str(self.name) class ParallelMetadata(models.Model): name = models.CharField(max_length=100, null=False) project = models.ForeignKey(ParallelProject, related_name='project_parallelmetadata',", "class NormalMetadata(models.Model): name = models.CharField(max_length=100, null=False, unique=True) project = models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE) class", "set_file(self, file_url): self.file = file_url class NormalMetadata(models.Model): name = models.CharField(max_length=100, null=False, unique=True) project", "= models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation) public_status = 
models.BooleanField(default=0)", "= models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def is_collab(self): return bool(self.collab_status)", "= models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url):", "on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation = models.ForeignKey(ParallelRelation, related_name=\"relation\", on_delete=models.CASCADE)", "= models.ManyToManyField(ParallelRelation) public_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def", "name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project = models.ForeignKey(NormalProject, related_name='project_document',", "related_name='owner_parallelproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation) public_status = models.BooleanField(default=0) timestamp =", "= models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation = models.ForeignKey(ParallelRelation, related_name=\"relation\", on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True,", "= models.CharField(max_length=100, blank=True, null=True) class ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document, related_name='doc_one') doc_two = models.ManyToManyField(Document,", "models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', 
on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0) collab_status = models.BooleanField(default=0) timestamp", "def is_collab(self): return bool(self.collab_status) def get_project_members(self): return self.project_members def get_owner(self): return self.owner def", "on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True, null=True) class ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document, related_name='doc_one') doc_two", "return self.project_members def get_owner(self): return self.owner def set_status_public(self, status): self.public_status=status def set_status_collab(self, status):", "= models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def is_collab(self): return bool(self.collab_status) def get_project_members(self): return", "import validate_file_extension # Create your models here. class NormalProject(models.Model): name = models.CharField(max_length=100, null=False,", "get_project_members(self): return self.project_members def __str__(self): return str(self.name) class ParallelMetadata(models.Model): name = models.CharField(max_length=100, null=False)", "models here. 
class NormalProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject',", "models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project = models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc", "models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url): self.tagged_doc = file_url def set_file(self, file_url): self.file = file_url", "project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation) public_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def", "owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project = models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100) timestamp", "upload_to='mediafiles/', validators=[validate_file_extension]) name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project =", "def get_project_members(self): return self.project_members def __str__(self): return str(self.name) class ParallelMetadata(models.Model): name = models.CharField(max_length=100,", "= models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0) collab_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self):", "= file_url class NormalMetadata(models.Model): name = models.CharField(max_length=100, null=False, unique=True) project = models.ForeignKey(NormalProject, related_name='project_normalMetadata',", "self.project_members def 
__str__(self): return str(self.name) class ParallelMetadata(models.Model): name = models.CharField(max_length=100, null=False) project =", "def __str__(self): return str(self.name) class ParallelMetadata(models.Model): name = models.CharField(max_length=100, null=False) project = models.ForeignKey(ParallelProject,", "public_status = models.BooleanField(default=0) collab_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status)", "set_status_collab(self, status): self.collab_status=status def __str__(self): return str(self.name) class Document(models.Model): file = models.FileField(blank=False, null=False,", "status): self.public_status=status def set_status_collab(self, status): self.collab_status=status def __str__(self): return str(self.name) class Document(models.Model): file", "metadata = models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document = models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data = models.CharField(max_length=100,", "models.ManyToManyField(Document, related_name='doc_two') class ParallelProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject',", "get_project_members(self): return self.project_members def get_owner(self): return self.owner def set_status_public(self, status): self.public_status=status def set_status_collab(self,", "= models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations", "timestamp = models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url): self.tagged_doc = file_url def set_file(self, file_url): self.file", "null=False) project = 
models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE)", "return bool(self.collab_status) def get_project_members(self): return self.project_members def get_owner(self): return self.owner def set_status_public(self, status):", "null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project", "= models.CharField(max_length=100, null=False, unique=True) project = models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata =", "models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations =", "set_status_public(self, status): self.public_status=status def set_status_collab(self, status): self.collab_status=status def __str__(self): return str(self.name) class Document(models.Model):", "models.CharField(max_length=100, blank=True, null=True) class ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document, related_name='doc_one') doc_two = models.ManyToManyField(Document, related_name='doc_two')", "models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project = models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True)", "models.BooleanField(default=0) timestamp = 
models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def get_project_members(self): return self.project_members def", "class Document(models.Model): file = models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name = models.CharField(max_length=100, null=False, unique=True)", "bool(self.collab_status) def get_project_members(self): return self.project_members def get_owner(self): return self.owner def set_status_public(self, status): self.public_status=status", "on_delete=models.CASCADE) project = models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True) def", "models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True, null=True) class ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document,", "data = models.CharField(max_length=100, blank=True, null=True) class ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document, related_name='doc_one') doc_two =", "models.CharField(max_length=100, null=False) project = models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\",", "models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE)", "models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document = models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data = models.CharField(max_length=100, 
blank=True, null=True)", "DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation = models.ForeignKey(ParallelRelation, related_name=\"relation\", on_delete=models.CASCADE) data =", "ParallelMetadata(models.Model): name = models.CharField(max_length=100, null=False) project = models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata", "related_name='project_document', on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url): self.tagged_doc =", "on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation) public_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True)", "owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0) collab_status =", "models.BooleanField(default=0) collab_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def is_collab(self):", "file_url def set_file(self, file_url): self.file = file_url class NormalMetadata(models.Model): name = models.CharField(max_length=100, null=False,", "timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def is_collab(self): return bool(self.collab_status) def get_project_members(self):", "document = models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True, null=True) class 
ParallelRelation(models.Model): doc_one", "unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project = models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100)", "= file_url def set_file(self, file_url): self.file = file_url class NormalMetadata(models.Model): name = models.CharField(max_length=100,", "validate_file_extension # Create your models here. class NormalProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True)", "on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document = models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE)", "= models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status", "self.tagged_doc = file_url def set_file(self, file_url): self.file = file_url class NormalMetadata(models.Model): name =", "= models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True, null=True) class ParallelRelation(models.Model): doc_one =", "class DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document = models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data", "= models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document = models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True,", "name = models.CharField(max_length=100, null=False, unique=True) 
owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL)", "models.CharField(max_length=100, null=False, unique=True) project = models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata,", "null=False, unique=True) project = models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata, related_name='metadata',", "project = models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation", "from django.conf import settings from django.db import models from .validators import validate_file_extension #", "file = models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name = models.CharField(max_length=100, null=False, unique=True) owner =", "models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url): self.tagged_doc", "models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation = models.ForeignKey(ParallelRelation,", "class ParallelProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = 
models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members", "str(self.name) class ParallelMetadata(models.Model): name = models.CharField(max_length=100, null=False) project = models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class", "file_url): self.file = file_url class NormalMetadata(models.Model): name = models.CharField(max_length=100, null=False, unique=True) project =", "django.db import models from .validators import validate_file_extension # Create your models here. class", "models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def get_project_members(self): return self.project_members def __str__(self): return str(self.name)", "models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation) public_status = models.BooleanField(default=0) timestamp", "is_public(self): return bool(self.public_status) def is_collab(self): return bool(self.collab_status) def get_project_members(self): return self.project_members def get_owner(self):", "models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def is_collab(self): return bool(self.collab_status) def", "settings from django.db import models from .validators import validate_file_extension # Create your models", "related_name='owner_normalproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0) collab_status = models.BooleanField(default=0) timestamp =", "collab_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def is_collab(self): 
return", "return self.owner def set_status_public(self, status): self.public_status=status def set_status_collab(self, status): self.collab_status=status def __str__(self): return", "def set_status_collab(self, status): self.collab_status=status def __str__(self): return str(self.name) class Document(models.Model): file = models.FileField(blank=False,", "self.collab_status=status def __str__(self): return str(self.name) class Document(models.Model): file = models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension])", "public_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def get_project_members(self): return", "def get_owner(self): return self.owner def set_status_public(self, status): self.public_status=status def set_status_collab(self, status): self.collab_status=status def", "= models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project = models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100) timestamp =", "is_public(self): return bool(self.public_status) def get_project_members(self): return self.project_members def __str__(self): return str(self.name) class ParallelMetadata(models.Model):", "= models.CharField(max_length=100, null=False) project = models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata,", "null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project = models.ForeignKey(NormalProject, related_name='project_document', on_delete=models.CASCADE) tagged_doc =", "unique=True) project = models.ForeignKey(NormalProject, 
related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE)", "bool(self.public_status) def get_project_members(self): return self.project_members def __str__(self): return str(self.name) class ParallelMetadata(models.Model): name =", "related_name='doc_two') class ParallelProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE)", "class NormalProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE) project_members", "models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation) public_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return", "models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation = models.ForeignKey(ParallelRelation, related_name=\"relation\", on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True, null=True)", "NormalProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE) project_members =", "= models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url): self.tagged_doc = file_url def set_file(self, file_url): self.file =", "class ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document, related_name='doc_one') doc_two = models.ManyToManyField(Document, related_name='doc_two') class ParallelProject(models.Model): name", "Create your models here. 
class NormalProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner =", "doc_one = models.ManyToManyField(Document, related_name='doc_one') doc_two = models.ManyToManyField(Document, related_name='doc_two') class ParallelProject(models.Model): name = models.CharField(max_length=100,", "name = models.CharField(max_length=100, null=False) project = models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata =", "set_tagged_doc(self, file_url): self.tagged_doc = file_url def set_file(self, file_url): self.file = file_url class NormalMetadata(models.Model):", "models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0) collab_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return", "status): self.collab_status=status def __str__(self): return str(self.name) class Document(models.Model): file = models.FileField(blank=False, null=False, upload_to='mediafiles/',", "owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_parallelproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) relations = models.ManyToManyField(ParallelRelation) public_status =", "class DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation = models.ForeignKey(ParallelRelation, related_name=\"relation\", on_delete=models.CASCADE) data", "timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status) def get_project_members(self): return self.project_members def __str__(self):", "models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url): self.tagged_doc = file_url def 
set_file(self, file_url):", "related_name='document', on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True, null=True) class ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document, related_name='doc_one')", "= models.ManyToManyField(Document, related_name='doc_two') class ParallelProject(models.Model): name = models.CharField(max_length=100, null=False, unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL,", "related_name='project_parallelmetadata', on_delete=models.CASCADE) class DocumentParallelMetadaRelation(models.Model):#falta metadata = models.ForeignKey(ParallelMetadata, related_name=\"parallelmetadata\", on_delete=models.CASCADE) relation = models.ForeignKey(ParallelRelation, related_name=\"relation\",", "get_owner(self): return self.owner def set_status_public(self, status): self.public_status=status def set_status_collab(self, status): self.collab_status=status def __str__(self):", "on_delete=models.CASCADE) tagged_doc = models.CharField(max_length=100) timestamp = models.DateTimeField(auto_now_add=True) def set_tagged_doc(self, file_url): self.tagged_doc = file_url", "self.file = file_url class NormalMetadata(models.Model): name = models.CharField(max_length=100, null=False, unique=True) project = models.ForeignKey(NormalProject,", "self.owner def set_status_public(self, status): self.public_status=status def set_status_collab(self, status): self.collab_status=status def __str__(self): return str(self.name)", "relations = models.ManyToManyField(ParallelRelation) public_status = models.BooleanField(default=0) timestamp = models.DateTimeField(auto_now_add=True) def is_public(self): return bool(self.public_status)", "django.conf import settings from django.db import models from .validators import validate_file_extension # Create", "validators=[validate_file_extension]) name = models.CharField(max_length=100, null=False, unique=True) owner = 
models.ForeignKey(settings.AUTH_USER_MODEL,related_name='owner_document', on_delete=models.CASCADE) project = models.ForeignKey(NormalProject,", "from .validators import validate_file_extension # Create your models here. class NormalProject(models.Model): name =", "Document(models.Model): file = models.FileField(blank=False, null=False, upload_to='mediafiles/', validators=[validate_file_extension]) name = models.CharField(max_length=100, null=False, unique=True) owner", "models.ForeignKey(NormalProject, related_name='project_normalMetadata', on_delete=models.CASCADE) class DocumentNormalMetadataRelation(models.Model): metadata = models.ForeignKey(NormalMetadata, related_name='metadata', on_delete=models.CASCADE) document = models.ForeignKey(Document,", "unique=True) owner = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='owner_normalproject', on_delete=models.CASCADE) project_members = models.ManyToManyField(settings.AUTH_USER_MODEL) public_status = models.BooleanField(default=0) collab_status", "related_name='metadata', on_delete=models.CASCADE) document = models.ForeignKey(Document, related_name='document', on_delete=models.CASCADE) data = models.CharField(max_length=100, blank=True, null=True) class", "return str(self.name) class ParallelMetadata(models.Model): name = models.CharField(max_length=100, null=False) project = models.ForeignKey(ParallelProject, related_name='project_parallelmetadata', on_delete=models.CASCADE)", "null=True) class ParallelRelation(models.Model): doc_one = models.ManyToManyField(Document, related_name='doc_one') doc_two = models.ManyToManyField(Document, related_name='doc_two') class ParallelProject(models.Model):" ]
[ "st_c = ed_c loss = torch.stack(loss, dim=1) return (loss * m).sum() / data.size()[0]", "0 self.p = np.zeros((counter, maximum_interval(output_info))) self.p_sampling = [] for item in output_info: if", "loss_mean + loss_std loss_info.backward() optimizerG.step() if problem_type: fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake)", "output_info, c, m): loss = [] st = 0 st_c = 0 for", "numpy as np import pandas as pd import torch import torch.utils.data import torch.optim", "i break layers_G = determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D = determine_layers_disc(self.dside, self.num_channels) self.generator =", "m) _,info_real = discriminator(real_cat_d) g = -(torch.log(y_fake + 1e-4).mean()) + cross_entropy g.backward(retain_graph=True) loss_mean", "optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch = max(1, len(train_data)", "'softmax': ed = st + item[0] counter += 1 self.model.append(np.argmax(data[:, st:ed], axis=-1)) st", "m.__class__.__name__ if classname.find('Conv') != -1: init.normal_(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm') != -1: init.normal_(m.weight.data,", "col is None: idx = np.random.choice(np.arange(self.n), n) return self.data[idx] idx = [] for", "[Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq) def forward(self, input): label=None if (self.str_end[1]-self.str_end[0])==1: label = input[:,", "= data_dim + self.cond_generator.n_opt for i in sides: if i * i >=", "1e-4).mean()) + cross_entropy g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std", "self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, 
self.transformer.output_info) real_pre, real_label = classifier(real) fake_pre, fake_label = classifier(fakeact)", "// 2)) layers_D = [] for prev, curr in zip(layer_dims, layer_dims[1:]): layers_D +=", "1.0, 0.02) init.constant_(m.bias.data, 0) class CTABGANSynthesizer: def __init__(self, class_dim=(256, 256, 256, 256), random_dim=100,", "else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq) def forward(self, input): label=None if (self.str_end[1]-self.str_end[0])==1:", "side, layers): super(Discriminator, self).__init__() self.side = side info = len(layers)-2 self.seq = Sequential(*layers)", "import Adam from torch.nn import functional as F from torch.nn import (Dropout, LeakyReLU,", "torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label", "axis=0) tmp = np.log(tmp + 1) tmp = tmp / np.sum(tmp) tmp_sampling =", "ed= st+output_info[tc][0] return (st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list = [] for i in col_idx:", "'softmax': ed = st + item[0] ed_c = st_c + item[0] tmp =", "classname.find('Conv') != -1: init.normal_(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm') != -1: init.normal_(m.weight.data, 1.0, 0.02)", "as np import pandas as pd import torch import torch.utils.data import torch.optim as", "= st + item[0] data_t.append(torch.tanh(data[:, st:ed])) st = ed elif item[1] == 'softmax':", "st = 0 counter = 0 for item in output_info: if item[1] ==", "// self.batch_size + 1 data = [] for i in range(steps): noisez =", "Generator(Module): def __init__(self, side, layers): super(Generator, self).__init__() self.side = side self.seq = Sequential(*layers)", "zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G += [ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0], 4, 2, 1, 
output_padding=0,", "layer_dims[-1][1], 1, 0), Sigmoid() ] return layers_D def determine_layers_gen(side, random_dim, num_channels): assert side", "c], dim=1) real_cat = torch.cat([real, c_perm], dim=1) real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat)", "// self.batch_size) for i in tqdm(range(self.epochs)): for _ in range(steps_per_epoch): noisez = torch.randn(self.batch_size,", "Adam(self.generator.parameters(), **optimizer_params) optimizerD = Adam(discriminator.parameters(), **optimizer_params) st_ed = None classifier=None optimizerC= None if", "for i in col_idx: pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a,", "(st_ed[1] - st_ed[0])==2: c_loss = BCELoss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) loss_cc", "CrossEntropyLoss() if (st_ed[1] - st_ed[0])==1: c_loss= SmoothL1Loss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre)", "input_): return self.seq(input_) def determine_layers_disc(side, num_channels): assert side >= 4 and side <=", "random_dim, layer_dims[-1][0], layer_dims[-1][1], 1, 0, output_padding=0, bias=False) ] for prev, curr in zip(reversed(layer_dims),", "= torch.stack(loss, dim=1) return (loss * m).sum() / data.size()[0] class Sampler(object): def __init__(self,", "continue elif item[1] == 'softmax': ed = st + item[0] tmp = []", "self.transformer.output_info) sides = [4, 8, 16, 24, 32] col_size_d = data_dim + self.cond_generator.n_opt", "import functional as F from torch.nn import (Dropout, LeakyReLU, Linear, Module, ReLU, Sequential,", "class Generator(Module): def __init__(self, side, layers): super(Generator, self).__init__() self.side = side self.seq =", "return vec def cond_loss(data, output_info, c, m): loss = [] st = 0", "0 tc= 0 for item in output_info: if c==target_col_index: break if 
item[1]=='tanh': st", "noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy()) data =", "get_st_ed(target_col_index,output_info): st = 0 c= 0 tc= 0 for item in output_info: if", "m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake", "st_ed for item in list(dis_dims): seq += [ Linear(dim, item), LeakyReLU(0.2), Dropout(0.5) ]", "noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm = c[perm]", "= 0 c= 0 tc= 0 for item in output_info: if c==target_col_index: break", "import pandas as pd import torch import torch.utils.data import torch.optim as optim from", "output_padding=0, bias=True) ] return layers_G def weights_init(m): classname = m.__class__.__name__ if classname.find('Conv') !=", "== 0: return None batch = batch idx = np.random.choice(np.arange(self.n_col), batch) vec =", "1),Sigmoid()] else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq) def forward(self, input): label=None if", "optimizerG.step() if problem_type: fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info)", "elif item[1] == 'softmax': ed = st + item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) st", "c_loss= SmoothL1Loss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size()) fake_label =", "= self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = 
torch.cat([fakeact, c], dim=1) fake_cat =", "and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_G = [", "data[:, st:ed], torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none') loss.append(tmp) st = ed st_c = ed_c", "label def apply_activate(data, output_info): data_t = [] st = 0 for item in", "= torch.from_numpy(c).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez)", "self.random_dim = random_dim self.class_dim = class_dim self.num_channels = num_channels self.dside = None self.gside", "> r).argmax(axis=axis) def maximum_interval(output_info): max_interval = 0 for item in output_info: max_interval =", "torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact", "st:ed], axis=0) tmp_sampling = np.sum(data[:, st:ed], axis=0) tmp = np.log(tmp + 1) tmp", "tmp self.interval.append((self.n_opt, item[0])) self.n_opt += item[0] self.n_col += 1 st = ed self.interval", "self.epochs = epochs self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def fit(self, train_data=pd.DataFrame,", "self.data[idx] idx = [] for c, o in zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx]", "import torch import torch.utils.data import torch.optim as optim from torch.optim import Adam from", "0] + opt1prime[i]] = 1 return vec def cond_loss(data, output_info, c, m): loss", "col, opt = condvec c = torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez,", "target_index=None if type: problem_type = list(type.keys())[0] if problem_type: target_index = train_data.columns.get_loc(type[problem_type]) self.transformer =", "idx] = 1 opt1prime = random_choice_prob_index(self.p[idx]) for i in 
np.arange(batch): vec[i, self.interval[idx[i], 0]", "return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1): r = np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis) return (a.cumsum(axis=axis)", "CTABGANSynthesizer: def __init__(self, class_dim=(256, 256, 256, 256), random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500, epochs=1): self.random_dim", "item[1] == 'softmax': ed = st + item[0] tmp = np.sum(data[:, st:ed], axis=0)", "return layers_D def determine_layers_gen(side, random_dim, num_channels): assert side >= 4 and side <=", "st + item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) st = ed return torch.cat(data_t, dim=1) def", "mixed={}, type={}): problem_type = None target_index=None if type: problem_type = list(type.keys())[0] if problem_type:", "self.seq(new_imp), label def apply_activate(data, output_info): data_t = [] st = 0 for item", "return torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info): st = 0 c= 0 tc= 0 for", "forward(self, input): return (self.seq(input)), self.seq_info(input) class Generator(Module): def __init__(self, side, layers): super(Generator, self).__init__()", "BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0], 4, 2, 1, output_padding=0, bias=True) ] return layers_G def", "batch) vec = np.zeros((batch, self.n_opt), dtype='float32') mask = np.zeros((batch, self.n_col), dtype='float32') mask[np.arange(batch), idx]", "col, opt): if col is None: idx = np.random.choice(np.arange(self.n), n) return self.data[idx] idx", "np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm = c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device) fake", "_ in range(steps_per_epoch): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m,", "= self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, 
self.transformer.output_info) real_pre, real_label = classifier(real) fake_pre, fake_label =", "= DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed) self.transformer.fit() train_data = self.transformer.transform(train_data.values) data_sampler = Sampler(train_data, self.transformer.output_info) data_dim", "self.batch_size = batch_size self.epochs = epochs self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")", "elif classname.find('BatchNorm') != -1: init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data, 0) class CTABGANSynthesizer: def __init__(self,", "return self.data[idx] class Discriminator(Module): def __init__(self, side, layers): super(Discriminator, self).__init__() self.side = side", "if (st_ed[1] - st_ed[0])==1: c_loss= SmoothL1Loss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) real_label", "ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1], 1, 0, output_padding=0, bias=False) ] for prev, curr in", "+= [ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0], 4, 2, 1, output_padding=0, bias=True) ] return", "np.sum(tmp) tmp_sampling = tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] = tmp self.interval.append((self.n_opt, item[0]))", "fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] - st_ed[0])==2: c_loss =", "ed = st + item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) st = ed return torch.cat(data_t,", "torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info = loss_mean + loss_std loss_info.backward() optimizerG.step() if problem_type: fake", "epochs self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def fit(self, train_data=pd.DataFrame, categorical=[], mixed={},", "type: problem_type = 
list(type.keys())[0] if problem_type: target_index = train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical,", "p=pp)) return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1): r = np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis) return", "st:ed], axis=-1)) st = ed self.interval = [] self.n_col = 0 self.n_opt =", "torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size) np.random.shuffle(perm)", "self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy()) data = np.concatenate(data, axis=0) result = self.transformer.inverse_transform(data) return", "return (st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list = [] for i in col_idx: pp =", "axis]), axis=axis) return (a.cumsum(axis=axis) > r).argmax(axis=axis) def maximum_interval(output_info): max_interval = 0 for item", "layers_D = [] for prev, curr in zip(layer_dims, layer_dims[1:]): layers_D += [ Conv2d(prev[0],", "0 counter = 0 for item in output_info: if item[1] == 'tanh': st", "= c_loss(real_pre, real_label) loss_cg = c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step()", "[] for item in output_info: if item[1] == 'tanh': st += item[0] continue", "opt[perm]) c_perm = c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake)", "continue elif item[1] == 'softmax': ed = st + item[0] counter += 1", "= torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m, col, opt = condvec", 
"(self.str_end[1]-self.str_end[0])==1: label = input[:, self.str_end[0]:self.str_end[1]] else: label = torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) new_imp =", "-(torch.log(y_fake + 1e-4).mean()) + cross_entropy g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - torch.mean(info_real.view(self.batch_size,-1), dim=0),", "Module, ReLU, Sequential, Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import", "sample(self, n, col, opt): if col is None: idx = np.random.choice(np.arange(self.n), n) return", "fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy()) data = np.concatenate(data,", "= self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d) loss_d", ">= col_size_g: self.gside = i break layers_G = determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D =", "ed_c = st_c + item[0] tmp = F.cross_entropy( data[:, st:ed], torch.argmax(c[:, st_c:ed_c], dim=1),", "len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_D = [] for", "noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy())", "data_t = [] st = 0 for item in output_info: if item[1] ==", "np.sum(data[:, st:ed], axis=0) tmp_sampling = np.sum(data[:, st:ed], axis=0) tmp = np.log(tmp + 1)", "for i in sides: if i * i >= col_size_d: self.dside = i", "for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec,", 
"betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(), **optimizer_params) optimizerD = Adam(discriminator.parameters(), **optimizer_params) st_ed", "= [4, 8, 16, 24, 32] col_size_d = data_dim + self.cond_generator.n_opt for i", "item in output_info: if item[1] == 'tanh': ed = st + item[0] data_t.append(torch.tanh(data[:,", "[] for j in range(item[0]): tmp.append(np.nonzero(data[:, st + j])[0]) self.model.append(tmp) st = ed", "self.dside = None self.gside = None self.l2scale = l2scale self.batch_size = batch_size self.epochs", "np import pandas as pd import torch import torch.utils.data import torch.optim as optim", "ConvTranspose2d(prev[0], curr[0], 4, 2, 1, output_padding=0, bias=True) ] return layers_G def weights_init(m): classname", "layers_G).to(self.device) discriminator = Discriminator(self.dside, layers_D).to(self.device) optimizer_params = dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG", "self.n_col), dtype='float32') mask[np.arange(batch), idx] = 1 opt1prime = random_choice_prob_index(self.p[idx]) for i in np.arange(batch):", "= 1 return vec def cond_loss(data, output_info, c, m): loss = [] st", "F.cross_entropy( data[:, st:ed], torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none') loss.append(tmp) st = ed st_c =", "cross_entropy = cond_loss(faket, self.transformer.output_info, c, m) _,info_real = discriminator(real_cat_d) g = -(torch.log(y_fake +", "torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] - st_ed[0])==2: c_loss = BCELoss() real_label = real_label.type_as(real_pre) fake_label =", "= 0 for item in output_info: if item[1] == 'tanh': ed = st", "] return layers_G def weights_init(m): classname = m.__class__.__name__ if classname.find('Conv') != -1: init.normal_(m.weight.data,", "= torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample(self.batch_size) c = condvec c = torch.from_numpy(c).to(self.device)", "n) 
return self.data[idx] idx = [] for c, o in zip(col, opt): idx.append(np.random.choice(self.model[c][o]))", "self.p[self.n_col, :item[0]] = tmp self.interval.append((self.n_opt, item[0])) self.n_opt += item[0] self.n_col += 1 st", "None self.l2scale = l2scale self.batch_size = batch_size self.epochs = epochs self.device = torch.device(\"cuda:0\"", "self.p_sampling = [] for item in output_info: if item[1] == 'tanh': st +=", "def __init__(self, data, output_info): super(Sampler, self).__init__() self.data = data self.model = [] self.n", "np.zeros((counter, maximum_interval(output_info))) self.p_sampling = [] for item in output_info: if item[1] == 'tanh':", "] return layers_D def determine_layers_gen(side, random_dim, num_channels): assert side >= 4 and side", "Generator(self.gside, layers_G).to(self.device) discriminator = Discriminator(self.dside, layers_D).to(self.device) optimizer_params = dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale)", "= discriminator(fake_cat_d) loss_d = (-(torch.log(y_real + 1e-4).mean()) - (torch.log(1. 
- y_fake + 1e-4).mean()))", "input): label=None if (self.str_end[1]-self.str_end[0])==1: label = input[:, self.str_end[0]:self.str_end[1]] else: label = torch.argmax(input[:, self.str_end[0]:self.str_end[1]],", "def fit(self, train_data=pd.DataFrame, categorical=[], mixed={}, type={}): problem_type = None target_index=None if type: problem_type", "item[0] counter += 1 self.model.append(np.argmax(data[:, st:ed], axis=-1)) st = ed self.interval = []", "def forward(self, input): label=None if (self.str_end[1]-self.str_end[0])==1: label = input[:, self.str_end[0]:self.str_end[1]] else: label =", "item in output_info: if item[1] == 'tanh': st += item[0] continue elif item[1]", "Sequential(*layers) self.seq_info = Sequential(*layers[:info]) def forward(self, input): return (self.seq(input)), self.seq_info(input) class Generator(Module): def", "faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) fake_cat", "perm = np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm = c[perm] real", "1, output_padding=0, bias=True) ] return layers_G def weights_init(m): classname = m.__class__.__name__ if classname.find('Conv')", "dim=0), 1) loss_info = loss_mean + loss_std loss_info.backward() optimizerG.step() if problem_type: fake =", "opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class Discriminator(Module): def __init__(self, side, layers): super(Discriminator, self).__init__() self.side", "len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_G = [ ConvTranspose2d(", "None self.gside = None self.l2scale = l2scale self.batch_size = batch_size self.epochs = epochs", "c==target_col_index: break if item[1]=='tanh': st += item[0] elif item[1] == 'softmax': st +=", "4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_G = [ 
ConvTranspose2d( random_dim, layer_dims[-1][0],", "tc+=1 ed= st+output_info[tc][0] return (st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list = [] for i in", "self.num_channels) self.generator = Generator(self.gside, layers_G).to(self.device) discriminator = Discriminator(self.dside, layers_D).to(self.device) optimizer_params = dict(lr=2e-4, betas=(0.5,", "= batch idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') opt1prime =", "st = 0 c= 0 tc= 0 for item in output_info: if c==target_col_index:", "optimizerC.step() def sample(self, n): self.generator.eval() output_info = self.transformer.output_info steps = n // self.batch_size", "tqdm import tqdm class Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0]) seq", "self.model.append(np.argmax(data[:, st:ed], axis=-1)) st = ed self.interval = [] self.n_col = 0 self.n_opt", "in range(steps_per_epoch): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m, col,", "self.model.append(tmp) st = ed def sample(self, n, col, opt): if col is None:", "self.n_col == 0: return None batch = batch idx = np.random.choice(np.arange(self.n_col), batch) vec", "label=None if (self.str_end[1]-self.str_end[0])==1: label = input[:, self.str_end[0]:self.str_end[1]] else: label = torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1)", "label else: return self.seq(new_imp), label def apply_activate(data, output_info): data_t = [] st =", "tmp = F.cross_entropy( data[:, st:ed], torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none') loss.append(tmp) st = ed", "np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] = tmp self.interval.append((self.n_opt, item[0])) self.n_opt += item[0] self.n_col +=", "def maximum_interval(output_info): max_interval = 0 for item 
in output_info: max_interval = max(max_interval, item[0])", "self.transformer.output_dim self.cond_generator = Cond(train_data, self.transformer.output_info) sides = [4, 8, 16, 24, 32] col_size_d", "torch import torch.utils.data import torch.optim as optim from torch.optim import Adam from torch.nn", "ConvTranspose2d, BatchNorm2d, Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import ImageTransformer,DataTransformer from tqdm import", "fake_cat = torch.cat([fakeact, c], dim=1) real_cat = torch.cat([real, c_perm], dim=1) real_cat_d = self.Dtransformer.transform(real_cat)", "num_channels self.dside = None self.gside = None self.l2scale = l2scale self.batch_size = batch_size", "layers_D += [ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1, 0), Sigmoid() ] return layers_D def", "def apply_activate(data, output_info): data_t = [] st = 0 for item in output_info:", "st = 0 st_c = 0 for item in output_info: if item[1] ==", "None target_index=None if type: problem_type = list(type.keys())[0] if problem_type: target_index = train_data.columns.get_loc(type[problem_type]) self.transformer", "c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,", "layers): super(Discriminator, self).__init__() self.side = side info = len(layers)-2 self.seq = Sequential(*layers) self.seq_info", "opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] =", "target_index = train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed) self.transformer.fit() train_data = self.transformer.transform(train_data.values) data_sampler", "= st + item[0] tmp = np.sum(data[:, st:ed], axis=0) tmp_sampling = np.sum(data[:, st:ed],", "curr in zip(layer_dims, 
layer_dims[1:]): layers_D += [ Conv2d(prev[0], curr[0], 4, 2, 1, bias=False),", "self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) real_cat = torch.cat([real, c_perm], dim=1) real_cat_d =", "elif (st_ed[1] - st_ed[0])==2: c_loss = BCELoss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre)", "axis=-1)) st = ed self.interval = [] self.n_col = 0 self.n_opt = 0", "g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0)", "if classname.find('Conv') != -1: init.normal_(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm') != -1: init.normal_(m.weight.data, 1.0,", "c_loss(real_pre, real_label) loss_cg = c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def", "c, o in zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class Discriminator(Module): def __init__(self, side,", "def determine_layers_disc(side, num_channels): assert side >= 4 and side <= 32 layer_dims =", "if (self.str_end[1]-self.str_end[0])==1: label = input[:, self.str_end[0]:self.str_end[1]] else: label = torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) new_imp", "dim=1) return (loss * m).sum() / data.size()[0] class Sampler(object): def __init__(self, data, output_info):", "= side info = len(layers)-2 self.seq = Sequential(*layers) self.seq_info = Sequential(*layers[:info]) def forward(self,", "Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch", "tmp_sampling = 
tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] = tmp self.interval.append((self.n_opt, item[0])) self.n_opt", "loss_info = loss_mean + loss_std loss_info.backward() optimizerG.step() if problem_type: fake = self.generator(noisez) faket", "c = torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez =", "Discriminator(self.dside, layers_D).to(self.device) optimizer_params = dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(), **optimizer_params)", "[] st = 0 st_c = 0 for item in output_info: if item[1]", "= torch.cat([fakeact, c], dim=1) fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat) cross_entropy = cond_loss(faket,", "self.p = np.zeros((counter, maximum_interval(output_info))) self.p_sampling = [] for item in output_info: if item[1]", "data_sampler = Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dim self.cond_generator = Cond(train_data, self.transformer.output_info) sides =", "BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import ImageTransformer,DataTransformer from tqdm import tqdm class Classifier(Module): def", "real = data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm = c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device) fake =", "c_loss = CrossEntropyLoss() if (st_ed[1] - st_ed[0])==1: c_loss= SmoothL1Loss() real_label = real_label.type_as(real_pre) fake_label", "torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m, col, opt = condvec c", "0 for item in output_info: if item[1] == 'tanh': st += item[0] continue", "* i >= col_size_d: self.dside = i break sides = [4, 8, 16,", "0.02) init.constant_(m.bias.data, 0) class CTABGANSynthesizer: def __init__(self, 
class_dim=(256, 256, 256, 256), random_dim=100, num_channels=64,", "= ed return torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info): st = 0 c= 0 tc=", "= 0 self.p = np.zeros((counter, maximum_interval(output_info))) self.p_sampling = [] for item in output_info:", "is None: idx = np.random.choice(np.arange(self.n), n) return self.data[idx] idx = [] for c,", "pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1): r = np.expand_dims(np.random.rand(a.shape[1", "loss_cc = c_loss(real_pre, real_label) loss_cg = c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward()", "fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d) loss_d = (-(torch.log(y_real", "item[0] continue elif item[1] == 'softmax': ed = st + item[0] counter +=", "torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad()", "cond_loss(faket, self.transformer.output_info, c, m) _,info_real = discriminator(real_cat_d) g = -(torch.log(y_fake + 1e-4).mean()) +", "= torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label else: return self.seq(new_imp), label", "self.data[idx] class Discriminator(Module): def __init__(self, side, layers): super(Discriminator, self).__init__() self.side = side info", "continue elif item[1] == 'softmax': ed = st + item[0] tmp = np.sum(data[:,", "256, 256, 256), random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500, epochs=1): self.random_dim = random_dim self.class_dim =", 
"self.batch_size + 1 data = [] for i in range(steps): noisez = torch.randn(self.batch_size,", "= real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre, real_label) loss_cg = c_loss(fake_pre, fake_label)", "== 'softmax': ed = st + item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) st = ed", "[ Linear(dim, item), LeakyReLU(0.2), Dropout(0.5) ] dim = item if (st_ed[1]-st_ed[0])==1: seq +=", "4, 2, 1, output_padding=0, bias=True) ] return layers_G def weights_init(m): classname = m.__class__.__name__", "dim = input_dim-(st_ed[1]-st_ed[0]) seq = [] self.str_end = st_ed for item in list(dis_dims):", "new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label else: return self.seq(new_imp),", "= len(data) st = 0 for item in output_info: if item[1] == 'tanh':", "def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0]) seq = [] self.str_end = st_ed", "and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_D = []", "+ item[0] tmp = [] for j in range(item[0]): tmp.append(np.nonzero(data[:, st + j])[0])", "layer_dims[-1][1] // 2)) layers_D = [] for prev, curr in zip(layer_dims, layer_dims[1:]): layers_D", "Sigmoid() ] return layers_D def determine_layers_gen(side, random_dim, num_channels): assert side >= 4 and", "col_size_g: self.gside = i break layers_G = determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D = determine_layers_disc(self.dside,", "'tanh': ed = st + item[0] data_t.append(torch.tanh(data[:, st:ed])) st = ed elif item[1]", "self.transformer.output_info, c, m) _,info_real = discriminator(real_cat_d) g = -(torch.log(y_fake + 1e-4).mean()) + cross_entropy", "dim=1) noisez = 
noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info)", "loss_info.backward() optimizerG.step() if problem_type: fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,", "fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact,", "ed = st + item[0] tmp = np.sum(data[:, st:ed], axis=0) tmp_sampling = np.sum(data[:,", "((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label else: return self.seq(new_imp), label def apply_activate(data, output_info): data_t =", "= condvec c = torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1)", "+= [ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1, 0), Sigmoid() ] return layers_D def determine_layers_gen(side,", "real_label = classifier(real) fake_pre, fake_label = classifier(fakeact) c_loss = CrossEntropyLoss() if (st_ed[1] -", "fit(self, train_data=pd.DataFrame, categorical=[], mixed={}, type={}): problem_type = None target_index=None if type: problem_type =", "c = torch.from_numpy(c).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake =", "!= -1: init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data, 0) class CTABGANSynthesizer: def __init__(self, class_dim=(256, 256,", "= [4, 8, 16, 24, 32] col_size_g = data_dim for i in sides:", "ReLU(True), ConvTranspose2d(prev[0], curr[0], 4, 2, 1, output_padding=0, bias=True) ] return layers_G def weights_init(m):", "torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info = loss_mean + loss_std loss_info.backward() 
optimizerG.step()", "break layers_G = determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D = determine_layers_disc(self.dside, self.num_channels) self.generator = Generator(self.gside,", "batch = batch idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') opt1prime", "* 2, layer_dims[-1][1] // 2)) layers_D = [] for prev, curr in zip(layer_dims,", "output_info: max_interval = max(max_interval, item[0]) return max_interval class Cond(object): def __init__(self, data, output_info):", "- st_ed[0])==1: c_loss= SmoothL1Loss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size())", "noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat", "break sides = [4, 8, 16, 24, 32] col_size_g = data_dim for i", "BatchNorm2d, Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import ImageTransformer,DataTransformer from tqdm import tqdm", "'softmax': ed = st + item[0] tmp = [] for j in range(item[0]):", "c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact", "Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import ImageTransformer,DataTransformer from tqdm import tqdm class", "import torch.optim as optim from torch.optim import Adam from torch.nn import functional as", "= real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1]", "class 
CTABGANSynthesizer: def __init__(self, class_dim=(256, 256, 256, 256), random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500, epochs=1):", "np.asarray(self.interval) def sample_train(self, batch): if self.n_col == 0: return None batch = batch", "opt1prime = random_choice_prob_index(self.p[idx]) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] =", "self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1)", "= noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy()) data", "batch_size=500, epochs=1): self.random_dim = random_dim self.class_dim = class_dim self.num_channels = num_channels self.dside =", "if target_index != None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init)", "self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy()) data = np.concatenate(data, axis=0) result", "= st + item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) st = ed return torch.cat(data_t, dim=1)", "faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) real_pre, real_label = classifier(real) fake_pre, fake_label", "= 0 st_c = 0 for item in output_info: if item[1] == 'tanh':", "import tqdm class Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0]) seq =", "+ loss_std loss_info.backward() optimizerG.step() if 
problem_type: fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact", "/ np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] = tmp self.interval.append((self.n_opt, item[0])) self.n_opt += item[0] self.n_col", "return self.seq(input_) def determine_layers_disc(side, num_channels): assert side >= 4 and side <= 32", "= batch_size self.epochs = epochs self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def", "idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx) for", "col_size_d = data_dim + self.cond_generator.n_opt for i in sides: if i * i", "[] st = 0 counter = 0 for item in output_info: if item[1]", "= ed st_c = ed_c loss = torch.stack(loss, dim=1) return (loss * m).sum()", "seq += [ Linear(dim, item), LeakyReLU(0.2), Dropout(0.5) ] dim = item if (st_ed[1]-st_ed[0])==1:", "if item[1]=='tanh': st += item[0] elif item[1] == 'softmax': st += item[0] c+=1", "def random_choice_prob_index_sampling(probs,col_idx): option_list = [] for i in col_idx: pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])),", "[] self.n = len(data) st = 0 for item in output_info: if item[1]", "mixed_dict=mixed) self.transformer.fit() train_data = self.transformer.transform(train_data.values) data_sampler = Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dim self.cond_generator", "2, layer_dims[-1][1] // 2)) layers_G = [ ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1], 1, 0,", "!= None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer", "= None self.gside = None 
self.l2scale = l2scale self.batch_size = batch_size self.epochs =", "Adam from torch.nn import functional as F from torch.nn import (Dropout, LeakyReLU, Linear,", "import torch.utils.data import torch.optim as optim from torch.optim import Adam from torch.nn import", "__init__(self, side, layers): super(Discriminator, self).__init__() self.side = side info = len(layers)-2 self.seq =", "forward(self, input_): return self.seq(input_) def determine_layers_disc(side, num_channels): assert side >= 4 and side", "for i in sides: if i * i >= col_size_g: self.gside = i", "self.seq = Sequential(*seq) def forward(self, input): label=None if (self.str_end[1]-self.str_end[0])==1: label = input[:, self.str_end[0]:self.str_end[1]]", "16, 24, 32] col_size_d = data_dim + self.cond_generator.n_opt for i in sides: if", "= side self.seq = Sequential(*layers) def forward(self, input_): return self.seq(input_) def determine_layers_disc(side, num_channels):", "layers_D).to(self.device) optimizer_params = dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(), **optimizer_params) optimizerD", "condvec c = torch.from_numpy(c).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake", "import numpy as np import pandas as pd import torch import torch.utils.data import", "= input[:, self.str_end[0]:self.str_end[1]] else: label = torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if", "opt): if col is None: idx = np.random.choice(np.arange(self.n), n) return self.data[idx] idx =", "dtype='float32') opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]]", "real_pre, real_label = classifier(real) fake_pre, fake_label = classifier(fakeact) c_loss = CrossEntropyLoss() if 
(st_ed[1]", "= ed_c loss = torch.stack(loss, dim=1) return (loss * m).sum() / data.size()[0] class", "y_fake,info_fake = discriminator(fake_cat) cross_entropy = cond_loss(faket, self.transformer.output_info, c, m) _,info_real = discriminator(real_cat_d) g", "= tmp self.interval.append((self.n_opt, item[0])) self.n_opt += item[0] self.n_col += 1 st = ed", "device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m, col, opt = condvec c = torch.from_numpy(c).to(self.device)", "batch): if self.n_col == 0: return None batch = batch idx = np.random.choice(np.arange(self.n_col),", "data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm = c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez) faket", "loss_d = (-(torch.log(y_real + 1e-4).mean()) - (torch.log(1. - y_fake + 1e-4).mean())) loss_d.backward() optimizerD.step()", "loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) -", "c], dim=1) fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat) cross_entropy = cond_loss(faket, self.transformer.output_info, c,", "class Cond(object): def __init__(self, data, output_info): self.model = [] st = 0 counter", "col[perm], opt[perm]) c_perm = c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez) faket =", "fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) real_pre, real_label =", "self.model = [] self.n = len(data) st = 0 for item in output_info:", "st = 0 for item in output_info: if item[1] == 'tanh': ed =", "tmp = tmp / np.sum(tmp) tmp_sampling = tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]]", "// 2)) layers_G 
= [ ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1], 1, 0, output_padding=0, bias=False)", "self.transformer.output_info steps = n // self.batch_size + 1 data = [] for i", "determine_layers_gen(side, random_dim, num_channels): assert side >= 4 and side <= 32 layer_dims =", "= [] for prev, curr in zip(layer_dims, layer_dims[1:]): layers_D += [ Conv2d(prev[0], curr[0],", "= [] st = 0 counter = 0 for item in output_info: if", "np.random.choice(np.arange(self.n), n) return self.data[idx] idx = [] for c, o in zip(col, opt):", "__init__(self, class_dim=(256, 256, 256, 256), random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500, epochs=1): self.random_dim = random_dim", "= noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info)", "def sample_train(self, batch): if self.n_col == 0: return None batch = batch idx", "def __init__(self, side, layers): super(Generator, self).__init__() self.side = side self.seq = Sequential(*layers) def", "Sequential(*seq) def forward(self, input): label=None if (self.str_end[1]-self.str_end[0])==1: label = input[:, self.str_end[0]:self.str_end[1]] else: label", "axis=axis) return (a.cumsum(axis=axis) > r).argmax(axis=axis) def maximum_interval(output_info): max_interval = 0 for item in", "discriminator(fake_cat_d) loss_d = (-(torch.log(y_real + 1e-4).mean()) - (torch.log(1. 
- y_fake + 1e-4).mean())) loss_d.backward()", "determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D = determine_layers_disc(self.dside, self.num_channels) self.generator = Generator(self.gside, layers_G).to(self.device) discriminator =", "= torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def fit(self, train_data=pd.DataFrame, categorical=[], mixed={}, type={}): problem_type", "vec, mask, idx, opt1prime def sample(self, batch): if self.n_col == 0: return None", "item[0] elif item[1] == 'softmax': st += item[0] c+=1 tc+=1 ed= st+output_info[tc][0] return", "for i in range(steps): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample(self.batch_size) c", "output_info: if item[1] == 'tanh': ed = st + item[0] data_t.append(torch.tanh(data[:, st:ed])) st", "categorical_list=categorical, mixed_dict=mixed) self.transformer.fit() train_data = self.transformer.transform(train_data.values) data_sampler = Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dim", "in output_info: if item[1] == 'tanh': st += item[0] continue elif item[1] ==", "j])[0]) self.model.append(tmp) st = ed def sample(self, n, col, opt): if col is", "Linear(dim, item), LeakyReLU(0.2), Dropout(0.5) ] dim = item if (st_ed[1]-st_ed[0])==1: seq += [Linear(dim,", "self.transformer.fit() train_data = self.transformer.transform(train_data.values) data_sampler = Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dim self.cond_generator =", "return (a.cumsum(axis=axis) > r).argmax(axis=axis) def maximum_interval(output_info): max_interval = 0 for item in output_info:", "col_idx: pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1): r =", "item[0] tmp = np.sum(data[:, st:ed], axis=0) tmp_sampling = 
np.sum(data[:, st:ed], axis=0) tmp =", "ed st_c = ed_c loss = torch.stack(loss, dim=1) return (loss * m).sum() /", "24, 32] col_size_g = data_dim for i in sides: if i * i", "// 2)] while layer_dims[-1][1] > 3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2,", "super(Discriminator, self).__init__() self.side = side info = len(layers)-2 self.seq = Sequential(*layers) self.seq_info =", "zip(layer_dims, layer_dims[1:]): layers_D += [ Conv2d(prev[0], curr[0], 4, 2, 1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2,", "vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec, mask, idx, opt1prime def", "= 0 self.n_opt = 0 st = 0 self.p = np.zeros((counter, maximum_interval(output_info))) self.p_sampling", "'softmax': ed = st + item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) st = ed return", "* i >= col_size_g: self.gside = i break layers_G = determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels)", "+= item[0] c+=1 tc+=1 ed= st+output_info[tc][0] return (st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list = []", "condvec = self.cond_generator.sample(self.batch_size) c = condvec c = torch.from_numpy(c).to(self.device) noisez = torch.cat([noisez, c],", "self.seq_info = Sequential(*layers[:info]) def forward(self, input): return (self.seq(input)), self.seq_info(input) class Generator(Module): def __init__(self,", "(st_ed[1]-st_ed[0])==2: seq += [Linear(dim, 1),Sigmoid()] else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq) def", "= st + item[0] tmp = [] for j in range(item[0]): tmp.append(np.nonzero(data[:, st", "max(max_interval, item[0]) return max_interval class Cond(object): def __init__(self, data, output_info): self.model = []", "+ 1 data = [] for i in range(steps): noisez = torch.randn(self.batch_size, self.random_dim,", "+ item[0] tmp = np.sum(data[:, st:ed], axis=0) tmp_sampling = np.sum(data[:, st:ed], axis=0) tmp", "i in np.arange(batch): 
vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec def", "= self.cond_generator.sample(self.batch_size) c = condvec c = torch.from_numpy(c).to(self.device) noisez = torch.cat([noisez, c], dim=1)", "item[0] data_t.append(torch.tanh(data[:, st:ed])) st = ed elif item[1] == 'softmax': ed = st", "item[0] continue elif item[1] == 'softmax': ed = st + item[0] tmp =", "self.str_end[0]:self.str_end[1]], axis=-1) new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label else:", "= np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis) return (a.cumsum(axis=axis) > r).argmax(axis=axis) def maximum_interval(output_info): max_interval =", "< 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_D = [] for prev,", "else \"cpu\") def fit(self, train_data=pd.DataFrame, categorical=[], mixed={}, type={}): problem_type = None target_index=None if", "self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] = tmp self.interval.append((self.n_opt, item[0])) self.n_opt += item[0] self.n_col += 1", "bias=False) ] for prev, curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G += [ BatchNorm2d(prev[0]), ReLU(True),", "torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info =", "side info = len(layers)-2 self.seq = Sequential(*layers) self.seq_info = Sequential(*layers[:info]) def forward(self, input):", "super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0]) seq = [] self.str_end = st_ed for item in", "n): self.generator.eval() output_info = self.transformer.output_info steps = n // self.batch_size + 1 data", "+= [Linear(dim, 1),Sigmoid()] else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq) def 
forward(self, input):", "0 for item in output_info: max_interval = max(max_interval, item[0]) return max_interval class Cond(object):", "'tanh': st += item[0] continue elif item[1] == 'softmax': ed = st +", "optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def sample(self, n): self.generator.eval() output_info = self.transformer.output_info steps = n", "mask, idx, opt1prime def sample(self, batch): if self.n_col == 0: return None batch", "layer_dims = [(1, side), (num_channels, side // 2)] while layer_dims[-1][1] > 3 and", "= np.sum(data[:, st:ed], axis=0) tmp = np.log(tmp + 1) tmp = tmp /", "discriminator = Discriminator(self.dside, layers_D).to(self.device) optimizer_params = dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG =", "= discriminator(real_cat_d) g = -(torch.log(y_fake + 1e-4).mean()) + cross_entropy g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1),", "(self.seq(input)), self.seq_info(input) class Generator(Module): def __init__(self, side, layers): super(Generator, self).__init__() self.side = side", "elif item[1] == 'softmax': ed = st + item[0] ed_c = st_c +", "ed self.interval = np.asarray(self.interval) def sample_train(self, batch): if self.n_col == 0: return None", "1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ] print() layers_D += [ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1],", "= apply_activate(faket, self.transformer.output_info) real_pre, real_label = classifier(real) fake_pre, fake_label = classifier(fakeact) c_loss =", "return vec, mask, idx, opt1prime def sample(self, batch): if self.n_col == 0: return", "batch_size self.epochs = epochs self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def fit(self,", "output_info: if item[1] == 'tanh': st += item[0] continue elif item[1] == 'softmax':", "self.seq_info(input) class Generator(Module): def __init__(self, side, layers): super(Generator, 
self).__init__() self.side = side self.seq", "item[0])) self.n_opt += item[0] self.n_col += 1 st = ed self.interval = np.asarray(self.interval)", "for item in output_info: if item[1] == 'tanh': st += item[0] continue elif", "[ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0], 4, 2, 1, output_padding=0, bias=True) ] return layers_G", "c= 0 tc= 0 for item in output_info: if c==target_col_index: break if item[1]=='tanh':", "discriminator(real_cat_d) g = -(torch.log(y_fake + 1e-4).mean()) + cross_entropy g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0)", "-1: init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data, 0) class CTABGANSynthesizer: def __init__(self, class_dim=(256, 256, 256,", "[] for prev, curr in zip(layer_dims, layer_dims[1:]): layers_D += [ Conv2d(prev[0], curr[0], 4,", "prev, curr in zip(layer_dims, layer_dims[1:]): layers_D += [ Conv2d(prev[0], curr[0], 4, 2, 1,", "c+=1 tc+=1 ed= st+output_info[tc][0] return (st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list = [] for i", "mask[np.arange(batch), idx] = 1 opt1prime = random_choice_prob_index(self.p[idx]) for i in np.arange(batch): vec[i, self.interval[idx[i],", "2)) layers_D = [] for prev, curr in zip(layer_dims, layer_dims[1:]): layers_D += [", "problem_type: fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) real_pre, real_label", "random_dim self.class_dim = class_dim self.num_channels = num_channels self.dside = None self.gside = None", "= st_ed for item in list(dis_dims): seq += [ Linear(dim, item), LeakyReLU(0.2), Dropout(0.5)", "np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis) return (a.cumsum(axis=axis) > r).argmax(axis=axis) def maximum_interval(output_info): max_interval = 0", "np.zeros((batch, self.n_opt), dtype='float32') opt1prime = 
random_choice_prob_index_sampling(self.p_sampling,idx) for i in np.arange(batch): vec[i, self.interval[idx[i], 0]", "Adam(discriminator.parameters(), **optimizer_params) st_ed = None classifier=None optimizerC= None if target_index != None: st_ed=", "inplace=True) ] print() layers_D += [ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1, 0), Sigmoid() ]", "real_cat = torch.cat([real, c_perm], dim=1) real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_", "= self.cond_generator.sample_train(self.batch_size) c, m, col, opt = condvec c = torch.from_numpy(c).to(self.device) m =", "0: return None batch = batch idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch,", "torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label else: return self.seq(new_imp), label def", "m).sum() / data.size()[0] class Sampler(object): def __init__(self, data, output_info): super(Sampler, self).__init__() self.data =", "= 0 for item in output_info: max_interval = max(max_interval, item[0]) return max_interval class", "self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def fit(self, train_data=pd.DataFrame, categorical=[], mixed={}, type={}):", "[] for i in range(steps): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample(self.batch_size)", "1 data = [] for i in range(steps): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device)", "= batch idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') mask =", "ImageTransformer(self.dside) steps_per_epoch = max(1, len(train_data) // self.batch_size) for i in tqdm(range(self.epochs)): for _", "data = [] for i in range(steps): noisez = 
torch.randn(self.batch_size, self.random_dim, device=self.device) condvec", "apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat)", "= st + item[0] counter += 1 self.model.append(np.argmax(data[:, st:ed], axis=-1)) st = ed", "batch) vec = np.zeros((batch, self.n_opt), dtype='float32') opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx) for i in np.arange(batch):", "tqdm(range(self.epochs)): for _ in range(steps_per_epoch): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size)", "len(data) st = 0 for item in output_info: if item[1] == 'tanh': st", "self.cond_generator.sample(self.batch_size) c = condvec c = torch.from_numpy(c).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez", "noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,", "= self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy()) data = np.concatenate(data, axis=0)", "- st_ed[0])==2: c_loss = BCELoss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) loss_cc =", "+= [Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2: seq += [Linear(dim, 1),Sigmoid()] else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))]", "0, output_padding=0, bias=False) ] for prev, curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G += [", "in tqdm(range(self.epochs)): for _ in range(steps_per_epoch): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec =", "data, output_info): self.model = [] st = 0 counter = 0 for item", "layer_dims.append((layer_dims[-1][0] * 2, 
layer_dims[-1][1] // 2)) layers_D = [] for prev, curr in", "option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1): r = np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis)", "3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_G =", "i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec, mask,", "class Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0]) seq = [] self.str_end", "st + item[0] data_t.append(torch.tanh(data[:, st:ed])) st = ed elif item[1] == 'softmax': ed", "0) class CTABGANSynthesizer: def __init__(self, class_dim=(256, 256, 256, 256), random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500,", "batch = batch idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') mask", "= random_choice_prob_index_sampling(self.p_sampling,idx) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1", "layers_D += [ Conv2d(prev[0], curr[0], 4, 2, 1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ]", "= c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact =", "[ Conv2d(prev[0], curr[0], 4, 2, 1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ] print() layers_D", "F from torch.nn import (Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Conv2d, ConvTranspose2d, BatchNorm2d,", "= self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d) loss_d = (-(torch.log(y_real +", "1 self.model.append(np.argmax(data[:, st:ed], axis=-1)) st = ed self.interval = [] self.n_col = 0", "def forward(self, input_): 
return self.seq(input_) def determine_layers_disc(side, num_channels): assert side >= 4 and", "dim=1) def get_st_ed(target_col_index,output_info): st = 0 c= 0 tc= 0 for item in", "torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat =", "st_ed= get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside)", "st += item[0] continue elif item[1] == 'softmax': ed = st + item[0]", "eps=1e-3, weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(), **optimizer_params) optimizerD = Adam(discriminator.parameters(), **optimizer_params) st_ed = None", "self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) real_pre, real_label = classifier(real) fake_pre,", "output_info): super(Sampler, self).__init__() self.data = data self.model = [] self.n = len(data) st", "zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class Discriminator(Module): def __init__(self, side, layers): super(Discriminator, self).__init__()", "real = torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info)", "in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec, mask, idx,", "== 'softmax': ed = st + item[0] tmp = np.sum(data[:, st:ed], axis=0) tmp_sampling", "[(1, side), (num_channels, side // 2)] while layer_dims[-1][1] > 3 and len(layer_dims) <", "item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) 
st = ed return torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info): st", "st+output_info[tc][0] return (st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list = [] for i in col_idx: pp", "= list(type.keys())[0] if problem_type: target_index = train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed) self.transformer.fit()", "= Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside)", "= -(torch.log(y_fake + 1e-4).mean()) + cross_entropy g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - torch.mean(info_real.view(self.batch_size,-1),", "[ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1, 0), Sigmoid() ] return layers_D def determine_layers_gen(side, random_dim,", "layer_dims[-1][1] > 3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2))", "= Sequential(*layers) self.seq_info = Sequential(*layers[:info]) def forward(self, input): return (self.seq(input)), self.seq_info(input) class Generator(Module):", "m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm =", "loss.append(tmp) st = ed st_c = ed_c loss = torch.stack(loss, dim=1) return (loss", "from model.synthesizer.transformer import ImageTransformer,DataTransformer from tqdm import tqdm class Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed):", "[] self.str_end = st_ed for item in list(dis_dims): seq += [ Linear(dim, item),", "item in output_info: max_interval = max(max_interval, item[0]) return max_interval class Cond(object): 
def __init__(self,", "item[0] self.n_col += 1 st = ed self.interval = np.asarray(self.interval) def sample_train(self, batch):", "sample(self, batch): if self.n_col == 0: return None batch = batch idx =", "= ed elif item[1] == 'softmax': ed = st + item[0] data_t.append(F.gumbel_softmax(data[:, st:ed],", "torch.from_numpy(c).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez) faket", "np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') mask = np.zeros((batch, self.n_col), dtype='float32') mask[np.arange(batch),", "optimizerD.step() noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m, col, opt", "= n // self.batch_size + 1 data = [] for i in range(steps):", "curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G += [ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0], 4, 2,", "self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) real_cat = torch.cat([real,", "g = -(torch.log(y_fake + 1e-4).mean()) + cross_entropy g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) -", "item[1] == 'softmax': ed = st + item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) st =", "loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def sample(self, n): self.generator.eval() output_info = self.transformer.output_info steps", "opt = condvec c = torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c],", "item in output_info: if c==target_col_index: break if item[1]=='tanh': st += item[0] elif item[1]", "self.generator.eval() output_info = 
self.transformer.output_info steps = n // self.batch_size + 1 data =", "(a.cumsum(axis=axis) > r).argmax(axis=axis) def maximum_interval(output_info): max_interval = 0 for item in output_info: max_interval", "- axis]), axis=axis) return (a.cumsum(axis=axis) > r).argmax(axis=axis) def maximum_interval(output_info): max_interval = 0 for", "st = ed return torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info): st = 0 c= 0", "item[0] ed_c = st_c + item[0] tmp = F.cross_entropy( data[:, st:ed], torch.argmax(c[:, st_c:ed_c],", "torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake)", "continue elif item[1] == 'softmax': ed = st + item[0] ed_c = st_c", "+ item[0] counter += 1 self.model.append(np.argmax(data[:, st:ed], axis=-1)) st = ed self.interval =", "3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_D =", "/ data.size()[0] class Sampler(object): def __init__(self, data, output_info): super(Sampler, self).__init__() self.data = data", "in col_idx: pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1): r", "= determine_layers_disc(self.dside, self.num_channels) self.generator = Generator(self.gside, layers_G).to(self.device) discriminator = Discriminator(self.dside, layers_D).to(self.device) optimizer_params =", "] print() layers_D += [ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1, 0), Sigmoid() ] return", "0.0, 0.02) elif classname.find('BatchNorm') != -1: init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data, 0) class CTABGANSynthesizer:", "self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat) cross_entropy = cond_loss(faket, self.transformer.output_info, c, m) _,info_real = 
discriminator(real_cat_d)", "0 self.n_opt = 0 st = 0 self.p = np.zeros((counter, maximum_interval(output_info))) self.p_sampling =", "input_dim-(st_ed[1]-st_ed[0]) seq = [] self.str_end = st_ed for item in list(dis_dims): seq +=", "= torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] - st_ed[0])==2: c_loss = BCELoss() real_label", "= item if (st_ed[1]-st_ed[0])==1: seq += [Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2: seq += [Linear(dim,", "dtype='float32') mask[np.arange(batch), idx] = 1 opt1prime = random_choice_prob_index(self.p[idx]) for i in np.arange(batch): vec[i,", "((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label else: return self.seq(new_imp), label def apply_activate(data, output_info):", "* 2, layer_dims[-1][1] // 2)) layers_G = [ ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1], 1,", "] for prev, curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G += [ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0],", "problem_type = list(type.keys())[0] if problem_type: target_index = train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed)", "if problem_type: fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) real_pre,", "32] col_size_g = data_dim for i in sides: if i * i >=", "loss_d.backward() optimizerD.step() noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m, col,", "+= 1 st = ed self.interval = np.asarray(self.interval) def sample_train(self, batch): if self.n_col", "torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = 
noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm", "categorical=[], mixed={}, type={}): problem_type = None target_index=None if type: problem_type = list(type.keys())[0] if", "= None target_index=None if type: problem_type = list(type.keys())[0] if problem_type: target_index = train_data.columns.get_loc(type[problem_type])", "self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec def cond_loss(data, output_info, c, m):", "= class_dim self.num_channels = num_channels self.dside = None self.gside = None self.l2scale =", "= (-(torch.log(y_real + 1e-4).mean()) - (torch.log(1. - y_fake + 1e-4).mean())) loss_d.backward() optimizerD.step() noisez", "= torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat", "condvec = self.cond_generator.sample_train(self.batch_size) c, m, col, opt = condvec c = torch.from_numpy(c).to(self.device) m", "fake_label = torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] - st_ed[0])==2: c_loss = BCELoss() real_label = real_label.type_as(real_pre)", "np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx) for i in", "+= item[0] continue elif item[1] == 'softmax': ed = st + item[0] ed_c", "== 'tanh': st += item[0] continue elif item[1] == 'softmax': ed = st", "from torch.optim import Adam from torch.nn import functional as F from torch.nn import", "axis=-1) new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label else: return", "self.class_dim = class_dim self.num_channels = num_channels self.dside = None self.gside = None self.l2scale", "[] self.n_col = 0 self.n_opt = 0 st = 0 self.p = np.zeros((counter,", 
"classname = m.__class__.__name__ if classname.find('Conv') != -1: init.normal_(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm') !=", "* m).sum() / data.size()[0] class Sampler(object): def __init__(self, data, output_info): super(Sampler, self).__init__() self.data", "as pd import torch import torch.utils.data import torch.optim as optim from torch.optim import", ">= 4 and side <= 32 layer_dims = [(1, side), (num_channels, side //", "i in sides: if i * i >= col_size_g: self.gside = i break", "i >= col_size_d: self.dside = i break sides = [4, 8, 16, 24,", "= m.__class__.__name__ if classname.find('Conv') != -1: init.normal_(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm') != -1:", "self.random_dim, device=self.device) condvec = self.cond_generator.sample(self.batch_size) c = condvec c = torch.from_numpy(c).to(self.device) noisez =", "self.interval = [] self.n_col = 0 self.n_opt = 0 st = 0 self.p", "in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec def cond_loss(data,", "layers_G = [ ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1], 1, 0, output_padding=0, bias=False) ] for", "data_t.append(torch.tanh(data[:, st:ed])) st = ed elif item[1] == 'softmax': ed = st +", "from tqdm import tqdm class Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0])", "item[1] == 'softmax': ed = st + item[0] tmp = [] for j", "n, col, opt): if col is None: idx = np.random.choice(np.arange(self.n), n) return self.data[idx]", "= Adam(discriminator.parameters(), **optimizer_params) st_ed = None classifier=None optimizerC= None if target_index != None:", "tc= 0 for item in output_info: if c==target_col_index: break if item[1]=='tanh': st +=", "list(type.keys())[0] if problem_type: target_index = train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical, 
mixed_dict=mixed) self.transformer.fit() train_data", "in sides: if i * i >= col_size_g: self.gside = i break layers_G", "st:ed], torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none') loss.append(tmp) st = ed st_c = ed_c loss", "= torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info = loss_mean + loss_std loss_info.backward()", "if c==target_col_index: break if item[1]=='tanh': st += item[0] elif item[1] == 'softmax': st", "problem_type = None target_index=None if type: problem_type = list(type.keys())[0] if problem_type: target_index =", "determine_layers_disc(self.dside, self.num_channels) self.generator = Generator(self.gside, layers_G).to(self.device) discriminator = Discriminator(self.dside, layers_D).to(self.device) optimizer_params = dict(lr=2e-4,", "torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] - st_ed[0])==2: c_loss = BCELoss() real_label =", "optim from torch.optim import Adam from torch.nn import functional as F from torch.nn", "st = 0 for item in output_info: if item[1] == 'tanh': st +=", "o in zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class Discriminator(Module): def __init__(self, side, layers):", "output_info): data_t = [] st = 0 for item in output_info: if item[1]", "+ item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) st = ed return torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info):", "tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] = tmp self.interval.append((self.n_opt, item[0])) self.n_opt += item[0]", "+ opt1prime[i]] = 1 return vec, mask, idx, opt1prime def sample(self, batch): if", "= ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch = max(1, len(train_data) // self.batch_size) for i", "= self.generator(noisez) faket = 
self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) real_pre, real_label = classifier(real)", "i * i >= col_size_g: self.gside = i break layers_G = determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt,", "item[1] == 'tanh': ed = st + item[0] data_t.append(torch.tanh(data[:, st:ed])) st = ed", "def __init__(self, class_dim=(256, 256, 256, 256), random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500, epochs=1): self.random_dim =", "2, layer_dims[-1][1] // 2)) layers_D = [] for prev, curr in zip(layer_dims, layer_dims[1:]):", "item), LeakyReLU(0.2), Dropout(0.5) ] dim = item if (st_ed[1]-st_ed[0])==1: seq += [Linear(dim, 1)]", "and side <= 32 layer_dims = [(1, side), (num_channels, side // 2)] while", "item[1] == 'tanh': st += item[0] continue elif item[1] == 'softmax': ed =", "if ((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label else: return self.seq(new_imp), label def apply_activate(data,", "self).__init__() self.data = data self.model = [] self.n = len(data) st = 0", "Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dim self.cond_generator = Cond(train_data, self.transformer.output_info) sides = [4, 8,", "Cond(object): def __init__(self, data, output_info): self.model = [] st = 0 counter =", "self.interval = np.asarray(self.interval) def sample_train(self, batch): if self.n_col == 0: return None batch", "0] + opt1prime[i]] = 1 return vec, mask, idx, opt1prime def sample(self, batch):", "== 'softmax': ed = st + item[0] ed_c = st_c + item[0] tmp", "class_dim self.num_channels = num_channels self.dside = None self.gside = None self.l2scale = l2scale", "axis=0) tmp_sampling = np.sum(data[:, st:ed], axis=0) tmp = np.log(tmp + 1) tmp =", "optimizerD = Adam(discriminator.parameters(), **optimizer_params) st_ed = None classifier=None optimizerC= None if target_index !=", "= l2scale 
self.batch_size = batch_size self.epochs = epochs self.device = torch.device(\"cuda:0\" if torch.cuda.is_available()", "torch.optim as optim from torch.optim import Adam from torch.nn import functional as F", "+ j])[0]) self.model.append(tmp) st = ed def sample(self, n, col, opt): if col", "= input_dim-(st_ed[1]-st_ed[0]) seq = [] self.str_end = st_ed for item in list(dis_dims): seq", "optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch =", "Sequential(*layers) def forward(self, input_): return self.seq(input_) def determine_layers_disc(side, num_channels): assert side >= 4", "counter = 0 for item in output_info: if item[1] == 'tanh': st +=", "real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d)", "+= [ Conv2d(prev[0], curr[0], 4, 2, 1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ] print()", "BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ] print() layers_D += [ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1, 0),", "tqdm class Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0]) seq = []", "Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0]) seq = [] self.str_end =", "self.cond_generator.n_opt for i in sides: if i * i >= col_size_d: self.dside =", "self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch = max(1, len(train_data) //", "= classifier(fakeact) c_loss = CrossEntropyLoss() if (st_ed[1] - st_ed[0])==1: 
c_loss= SmoothL1Loss() real_label =", "= [] self.n_col = 0 self.n_opt = 0 st = 0 self.p =", "item[0]) return max_interval class Cond(object): def __init__(self, data, output_info): self.model = [] st", "reduction='none') loss.append(tmp) st = ed st_c = ed_c loss = torch.stack(loss, dim=1) return", "item[1] == 'softmax': ed = st + item[0] ed_c = st_c + item[0]", "st + j])[0]) self.model.append(tmp) st = ed def sample(self, n, col, opt): if", "= [] for item in output_info: if item[1] == 'tanh': st += item[0]", "fake_pre, fake_label = classifier(fakeact) c_loss = CrossEntropyLoss() if (st_ed[1] - st_ed[0])==1: c_loss= SmoothL1Loss()", "steps = n // self.batch_size + 1 data = [] for i in", "data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2)) st = ed return torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info): st =", "1, 0), Sigmoid() ] return layers_D def determine_layers_gen(side, random_dim, num_channels): assert side >=", "in list(dis_dims): seq += [ Linear(dim, item), LeakyReLU(0.2), Dropout(0.5) ] dim = item", "np.log(tmp + 1) tmp = tmp / np.sum(tmp) tmp_sampling = tmp_sampling / np.sum(tmp_sampling)", "info = len(layers)-2 self.seq = Sequential(*layers) self.seq_info = Sequential(*layers[:info]) def forward(self, input): return", "layer_dims[-1][1], 1, 0, output_padding=0, bias=False) ] for prev, curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G", "= [ ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1], 1, 0, output_padding=0, bias=False) ] for prev,", "in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G += [ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0], 4, 2, 1,", "dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact =", "= torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - 
torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - torch.std(info_real.view(self.batch_size,-1),", "self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch = max(1, len(train_data) // self.batch_size) for", "= tmp / np.sum(tmp) tmp_sampling = tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] =", "= [] self.n = len(data) st = 0 for item in output_info: if", "self.num_channels = num_channels self.dside = None self.gside = None self.l2scale = l2scale self.batch_size", "= np.random.choice(np.arange(self.n), n) return self.data[idx] idx = [] for c, o in zip(col,", "ImageTransformer,DataTransformer from tqdm import tqdm class Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim =", "8, 16, 24, 32] col_size_d = data_dim + self.cond_generator.n_opt for i in sides:", "reversed(layer_dims[:-1])): layers_G += [ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0], 4, 2, 1, output_padding=0, bias=True)", "sides: if i * i >= col_size_g: self.gside = i break layers_G =", "self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat) cross_entropy", "+ opt1prime[i]] = 1 return vec def cond_loss(data, output_info, c, m): loss =", "real_label = torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] - st_ed[0])==2: c_loss = BCELoss()", "= max(1, len(train_data) // self.batch_size) for i in tqdm(range(self.epochs)): for _ in range(steps_per_epoch):", "label = torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) | 
((self.str_end[1]-self.str_end[0])==1): return", "i in tqdm(range(self.epochs)): for _ in range(steps_per_epoch): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec", "+ 1e-4).mean())) loss_d.backward() optimizerD.step() noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c,", "cross_entropy g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1),", "if (st_ed[1]-st_ed[0])==1: seq += [Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2: seq += [Linear(dim, 1),Sigmoid()] else:", "= [] st = 0 for item in output_info: if item[1] == 'tanh':", "self.n_col = 0 self.n_opt = 0 st = 0 self.p = np.zeros((counter, maximum_interval(output_info)))", "weights_init(m): classname = m.__class__.__name__ if classname.find('Conv') != -1: init.normal_(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm')", "+ cross_entropy g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std =", "256), random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500, epochs=1): self.random_dim = random_dim self.class_dim = class_dim self.num_channels", "None if target_index != None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params)", "/ np.sum(tmp) tmp_sampling = tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] = tmp self.interval.append((self.n_opt,", "tmp = np.log(tmp + 1) tmp = tmp / np.sum(tmp) tmp_sampling = tmp_sampling", "= torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) 
new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1),", "real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre, real_label) loss_cg = c_loss(fake_pre, fake_label) optimizerG.zero_grad()", "else: label = torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) | ((self.str_end[1]-self.str_end[0])==1):", "+= item[0] continue elif item[1] == 'softmax': ed = st + item[0] tmp", "1, 0, output_padding=0, bias=False) ] for prev, curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G +=", "= torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size) np.random.shuffle(perm) real =", "self.dside = i break sides = [4, 8, 16, 24, 32] col_size_g =", "self.interval.append((self.n_opt, item[0])) self.n_opt += item[0] self.n_col += 1 st = ed self.interval =", "1 return vec, mask, idx, opt1prime def sample(self, batch): if self.n_col == 0:", "layers_G def weights_init(m): classname = m.__class__.__name__ if classname.find('Conv') != -1: init.normal_(m.weight.data, 0.0, 0.02)", "noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez) faket =", "def weights_init(m): classname = m.__class__.__name__ if classname.find('Conv') != -1: init.normal_(m.weight.data, 0.0, 0.02) elif", "(torch.log(1. 
- y_fake + 1e-4).mean())) loss_d.backward() optimizerD.step() noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec", "pd import torch import torch.utils.data import torch.optim as optim from torch.optim import Adam", "] dim = item if (st_ed[1]-st_ed[0])==1: seq += [Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2: seq", "DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed) self.transformer.fit() train_data = self.transformer.transform(train_data.values) data_sampler = Sampler(train_data, self.transformer.output_info) data_dim =", "None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer =", "= torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake =", "st + item[0] counter += 1 self.model.append(np.argmax(data[:, st:ed], axis=-1)) st = ed self.interval", "range(steps): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample(self.batch_size) c = condvec c", "None: idx = np.random.choice(np.arange(self.n), n) return self.data[idx] idx = [] for c, o", "prev, curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G += [ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0], 4,", "st = ed def sample(self, n, col, opt): if col is None: idx", "maximum_interval(output_info): max_interval = 0 for item in output_info: max_interval = max(max_interval, item[0]) return", "def cond_loss(data, output_info, c, m): loss = [] st = 0 st_c =", "tmp_sampling = np.sum(data[:, st:ed], axis=0) tmp = np.log(tmp + 1) tmp = tmp", "dtype='float32') mask = np.zeros((batch, self.n_col), 
dtype='float32') mask[np.arange(batch), idx] = 1 opt1prime = random_choice_prob_index(self.p[idx])", "= np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx) for i", "= F.cross_entropy( data[:, st:ed], torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none') loss.append(tmp) st = ed st_c", "0 for item in output_info: if item[1] == 'tanh': ed = st +", "= [(1, side), (num_channels, side // 2)] while layer_dims[-1][1] > 3 and len(layer_dims)", "for item in output_info: if item[1] == 'tanh': ed = st + item[0]", "side, layers): super(Generator, self).__init__() self.side = side self.seq = Sequential(*layers) def forward(self, input_):", "(st_ed[1] - st_ed[0])==1: c_loss= SmoothL1Loss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) real_label =", "noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm", "return layers_G def weights_init(m): classname = m.__class__.__name__ if classname.find('Conv') != -1: init.normal_(m.weight.data, 0.0,", "st_c = 0 for item in output_info: if item[1] == 'tanh': st +=", "random_dim, num_channels): assert side >= 4 and side <= 32 layer_dims = [(1,", "Sequential(*layers[:info]) def forward(self, input): return (self.seq(input)), self.seq_info(input) class Generator(Module): def __init__(self, side, layers):", "c, m): loss = [] st = 0 st_c = 0 for item", ":item[0]] = tmp self.interval.append((self.n_opt, item[0])) self.n_opt += item[0] self.n_col += 1 st =", "torch.stack(loss, dim=1) return (loss * m).sum() / data.size()[0] class Sampler(object): def __init__(self, data,", "self.seq = Sequential(*layers) self.seq_info = Sequential(*layers[:info]) def forward(self, input): return (self.seq(input)), self.seq_info(input) class", "for prev, 
curr in zip(layer_dims, layer_dims[1:]): layers_D += [ Conv2d(prev[0], curr[0], 4, 2,", "curr[0], 4, 2, 1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ] print() layers_D += [", "self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m, col, opt = condvec c =", "= epochs self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def fit(self, train_data=pd.DataFrame, categorical=[],", "fake_cat = torch.cat([fakeact, c], dim=1) fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat) cross_entropy =", "self.seq = Sequential(*layers) def forward(self, input_): return self.seq(input_) def determine_layers_disc(side, num_channels): assert side", "l2scale=1e-5, batch_size=500, epochs=1): self.random_dim = random_dim self.class_dim = class_dim self.num_channels = num_channels self.dside", "st = ed elif item[1] == 'softmax': ed = st + item[0] data_t.append(F.gumbel_softmax(data[:,", "max_interval = max(max_interval, item[0]) return max_interval class Cond(object): def __init__(self, data, output_info): self.model", "def sample(self, n): self.generator.eval() output_info = self.transformer.output_info steps = n // self.batch_size +", "= self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy()) data = np.concatenate(data, axis=0) result = self.transformer.inverse_transform(data)", "weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(), **optimizer_params) optimizerD = Adam(discriminator.parameters(), **optimizer_params) st_ed = None classifier=None", "idx, opt1prime def sample(self, batch): if self.n_col == 0: return None batch =", "-1: init.normal_(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm') != -1: init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data, 0)", "fake_label = classifier(fakeact) c_loss = CrossEntropyLoss() if (st_ed[1] - 
st_ed[0])==1: c_loss= SmoothL1Loss() real_label", "np.zeros((batch, self.n_opt), dtype='float32') mask = np.zeros((batch, self.n_col), dtype='float32') mask[np.arange(batch), idx] = 1 opt1prime", "self.cond_generator.sample_train(self.batch_size) c, m, col, opt = condvec c = torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device)", "item[1] == 'softmax': ed = st + item[0] counter += 1 self.model.append(np.argmax(data[:, st:ed],", "loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info = loss_mean + loss_std", "ed = st + item[0] counter += 1 self.model.append(np.argmax(data[:, st:ed], axis=-1)) st =", "[] for c, o in zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class Discriminator(Module): def", "idx = [] for c, o in zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class", "= np.sum(data[:, st:ed], axis=0) tmp_sampling = np.sum(data[:, st:ed], axis=0) tmp = np.log(tmp +", "= determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D = determine_layers_disc(self.dside, self.num_channels) self.generator = Generator(self.gside, layers_G).to(self.device) discriminator", "= np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') mask = np.zeros((batch, self.n_col), dtype='float32')", "for item in output_info: max_interval = max(max_interval, item[0]) return max_interval class Cond(object): def", "get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer", "def get_st_ed(target_col_index,output_info): st = 0 c= 0 tc= 0 for item in 
output_info:", "= [] for i in range(steps): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec =", "= Sequential(*seq) def forward(self, input): label=None if (self.str_end[1]-self.str_end[0])==1: label = input[:, self.str_end[0]:self.str_end[1]] else:", "len(layers)-2 self.seq = Sequential(*layers) self.seq_info = Sequential(*layers[:info]) def forward(self, input): return (self.seq(input)), self.seq_info(input)", "data self.model = [] self.n = len(data) st = 0 for item in", "torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") def fit(self, train_data=pd.DataFrame, categorical=[], mixed={}, type={}): problem_type =", "item[0] tmp = F.cross_entropy( data[:, st:ed], torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none') loss.append(tmp) st =", "= data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm = c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez)", "np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec def cond_loss(data, output_info,", "= ed def sample(self, n, col, opt): if col is None: idx =", "data, output_info): super(Sampler, self).__init__() self.data = data self.model = [] self.n = len(data)", "sides: if i * i >= col_size_d: self.dside = i break sides =", "ed elif item[1] == 'softmax': ed = st + item[0] data_t.append(F.gumbel_softmax(data[:, st:ed], tau=0.2))", "+= item[0] self.n_col += 1 st = ed self.interval = np.asarray(self.interval) def sample_train(self,", "in zip(layer_dims, layer_dims[1:]): layers_D += [ Conv2d(prev[0], curr[0], 4, 2, 1, bias=False), BatchNorm2d(curr[0]),", "SmoothL1Loss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size())", "torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample(self.batch_size) c = 
condvec c = torch.from_numpy(c).to(self.device) noisez", "= Cond(train_data, self.transformer.output_info) sides = [4, 8, 16, 24, 32] col_size_d = data_dim", "return self.seq(new_imp), label def apply_activate(data, output_info): data_t = [] st = 0 for", "def determine_layers_gen(side, random_dim, num_channels): assert side >= 4 and side <= 32 layer_dims", "<= 32 layer_dims = [(1, side), (num_channels, side // 2)] while layer_dims[-1][1] >", "= BCELoss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre, real_label) loss_cg", "y_real,_ = discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d) loss_d = (-(torch.log(y_real + 1e-4).mean()) - (torch.log(1.", "noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample(self.batch_size) c = condvec c =", "= torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1)", "fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake", "**optimizer_params) st_ed = None classifier=None optimizerC= None if target_index != None: st_ed= get_st_ed(target_index,self.transformer.output_info)", "ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch = max(1, len(train_data) // self.batch_size) for i in", "vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec def cond_loss(data, output_info, c,", "i >= col_size_g: self.gside = i break layers_G = determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D", "for j in range(item[0]): tmp.append(np.nonzero(data[:, st + j])[0]) self.model.append(tmp) st = ed def", "side >= 4 and side <= 32 layer_dims = [(1, side), (num_channels, side", 
"num_channels): assert side >= 4 and side <= 32 layer_dims = [(1, side),", "option_list = [] for i in col_idx: pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return", "classifier(fakeact) c_loss = CrossEntropyLoss() if (st_ed[1] - st_ed[0])==1: c_loss= SmoothL1Loss() real_label = real_label.type_as(real_pre)", "return max_interval class Cond(object): def __init__(self, data, output_info): self.model = [] st =", "st + item[0] ed_c = st_c + item[0] tmp = F.cross_entropy( data[:, st:ed],", "= num_channels self.dside = None self.gside = None self.l2scale = l2scale self.batch_size =", "self.transformer.output_info) data_dim = self.transformer.output_dim self.cond_generator = Cond(train_data, self.transformer.output_info) sides = [4, 8, 16,", "= np.zeros((batch, self.n_opt), dtype='float32') mask = np.zeros((batch, self.n_col), dtype='float32') mask[np.arange(batch), idx] = 1", "'softmax': st += item[0] c+=1 tc+=1 ed= st+output_info[tc][0] return (st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list", "!= -1: init.normal_(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm') != -1: init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data,", "in output_info: if c==target_col_index: break if item[1]=='tanh': st += item[0] elif item[1] ==", "= 0 for item in output_info: if item[1] == 'tanh': st += item[0]", "4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_D = [] for prev, curr", "Dropout(0.5) ] dim = item if (st_ed[1]-st_ed[0])==1: seq += [Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2:", "seq += [Linear(dim, 1),Sigmoid()] else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq) def forward(self,", "item[0] continue elif item[1] == 'softmax': ed = st + item[0] ed_c =", "y_fake,_ = discriminator(fake_cat_d) loss_d = (-(torch.log(y_real + 1e-4).mean()) - (torch.log(1. 
- y_fake +", "self.seq(input_) def determine_layers_disc(side, num_channels): assert side >= 4 and side <= 32 layer_dims", "fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat) cross_entropy = cond_loss(faket, self.transformer.output_info, c, m) _,info_real", "'softmax': ed = st + item[0] tmp = np.sum(data[:, st:ed], axis=0) tmp_sampling =", "side self.seq = Sequential(*layers) def forward(self, input_): return self.seq(input_) def determine_layers_disc(side, num_channels): assert", "self.n_opt), dtype='float32') mask = np.zeros((batch, self.n_col), dtype='float32') mask[np.arange(batch), idx] = 1 opt1prime =", "> 3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_G", "if type: problem_type = list(type.keys())[0] if problem_type: target_index = train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data,", "apply_activate(faket, self.transformer.output_info) real_pre, real_label = classifier(real) fake_pre, fake_label = classifier(fakeact) c_loss = CrossEntropyLoss()", "loss = [] st = 0 st_c = 0 for item in output_info:", "= classifier(real) fake_pre, fake_label = classifier(fakeact) c_loss = CrossEntropyLoss() if (st_ed[1] - st_ed[0])==1:", "import (Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid, init, BCELoss,", "= torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake)", "for prev, curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G += [ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0],", "Conv2d(prev[0], curr[0], 4, 2, 1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ] print() layers_D +=", "= self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, 
self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) real_cat =", "if item[1] == 'tanh': st += item[0] continue elif item[1] == 'softmax': ed", "max_interval class Cond(object): def __init__(self, data, output_info): self.model = [] st = 0", "from torch.nn import (Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid,", "1 opt1prime = random_choice_prob_index(self.p[idx]) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]]", "- y_fake + 1e-4).mean())) loss_d.backward() optimizerD.step() noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec =", "st = 0 self.p = np.zeros((counter, maximum_interval(output_info))) self.p_sampling = [] for item in", "real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre, real_label) loss_cg = c_loss(fake_pre,", "init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data, 0) class CTABGANSynthesizer: def __init__(self, class_dim=(256, 256, 256, 256),", "layers_G += [ BatchNorm2d(prev[0]), ReLU(True), ConvTranspose2d(prev[0], curr[0], 4, 2, 1, output_padding=0, bias=True) ]", "data_dim for i in sides: if i * i >= col_size_g: self.gside =", "fake_label = fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] - st_ed[0])==2:", "self.n_opt), dtype='float32') opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] +", "loss_cg = c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def sample(self, n):", "m, col, opt = condvec c = torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez =", "fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre, real_label) loss_cg = 
c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad()", "as optim from torch.optim import Adam from torch.nn import functional as F from", "= apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) real_cat = torch.cat([real, c_perm], dim=1)", "(loss * m).sum() / data.size()[0] class Sampler(object): def __init__(self, data, output_info): super(Sampler, self).__init__()", "i in range(steps): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample(self.batch_size) c =", "CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import ImageTransformer,DataTransformer from tqdm import tqdm class Classifier(Module): def __init__(self,input_dim,", "(st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list = [] for i in col_idx: pp = probs[i]", "from torch.nn import functional as F from torch.nn import (Dropout, LeakyReLU, Linear, Module,", "+ self.cond_generator.n_opt for i in sides: if i * i >= col_size_d: self.dside", "idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') mask = np.zeros((batch, self.n_col),", "torch.cuda.is_available() else \"cpu\") def fit(self, train_data=pd.DataFrame, categorical=[], mixed={}, type={}): problem_type = None target_index=None", "2, 1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ] print() layers_D += [ Conv2d(layer_dims[-1][0], 1,", "= torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size)", "st = ed st_c = ed_c loss = torch.stack(loss, dim=1) return (loss *", "= discriminator(fake_cat) cross_entropy = cond_loss(faket, self.transformer.output_info, c, m) _,info_real = discriminator(real_cat_d) g =", "= None classifier=None optimizerC= None if 
target_index != None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier =", "4, 2, 1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ] print() layers_D += [ Conv2d(layer_dims[-1][0],", "if problem_type: target_index = train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed) self.transformer.fit() train_data =", "= probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1): r = np.expand_dims(np.random.rand(a.shape[1 -", "dim = item if (st_ed[1]-st_ed[0])==1: seq += [Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2: seq +=", "2)] while layer_dims[-1][1] > 3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1]", "self).__init__() self.side = side self.seq = Sequential(*layers) def forward(self, input_): return self.seq(input_) def", "self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch = max(1, len(train_data) // self.batch_size) for i in tqdm(range(self.epochs)):", "idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class Discriminator(Module): def __init__(self, side, layers): super(Discriminator, self).__init__() self.side =", "16, 24, 32] col_size_g = data_dim for i in sides: if i *", "item[1] == 'softmax': st += item[0] c+=1 tc+=1 ed= st+output_info[tc][0] return (st,ed) def", "bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True) ] print() layers_D += [ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1,", "+ item[0] data_t.append(torch.tanh(data[:, st:ed])) st = ed elif item[1] == 'softmax': ed =", "torch.nn import (Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid, init,", "determine_layers_disc(side, num_channels): assert side >= 4 and side <= 32 layer_dims = [(1,", "= 
torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] - st_ed[0])==2: c_loss = BCELoss() real_label = real_label.type_as(real_pre) fake_label", "real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] -", "if self.n_col == 0: return None batch = batch idx = np.random.choice(np.arange(self.n_col), batch)", "loss = torch.stack(loss, dim=1) return (loss * m).sum() / data.size()[0] class Sampler(object): def", "seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq) def forward(self, input): label=None if (self.str_end[1]-self.str_end[0])==1: label", "in output_info: max_interval = max(max_interval, item[0]) return max_interval class Cond(object): def __init__(self, data,", "if item[1] == 'tanh': ed = st + item[0] data_t.append(torch.tanh(data[:, st:ed])) st =", "for item in output_info: if c==target_col_index: break if item[1]=='tanh': st += item[0] elif", "vec = np.zeros((batch, self.n_opt), dtype='float32') mask = np.zeros((batch, self.n_col), dtype='float32') mask[np.arange(batch), idx] =", "8, 16, 24, 32] col_size_g = data_dim for i in sides: if i", "optimizerC= None if target_index != None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC =", "\"cpu\") def fit(self, train_data=pd.DataFrame, categorical=[], mixed={}, type={}): problem_type = None target_index=None if type:", "= np.log(tmp + 1) tmp = tmp / np.sum(tmp) tmp_sampling = tmp_sampling /", "= i break sides = [4, 8, 16, 24, 32] col_size_g = data_dim", "= ed self.interval = np.asarray(self.interval) def sample_train(self, batch): if self.n_col == 0: return", "tmp = np.sum(data[:, st:ed], axis=0) tmp_sampling = np.sum(data[:, st:ed], axis=0) tmp = np.log(tmp", "self.seq(new_imp).view(-1), label else: return self.seq(new_imp), label def apply_activate(data, 
output_info): data_t = [] st", "counter += 1 self.model.append(np.argmax(data[:, st:ed], axis=-1)) st = ed self.interval = [] self.n_col", "return self.data[idx] idx = [] for c, o in zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return", "LeakyReLU, Linear, Module, ReLU, Sequential, Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from", "st_ed[0])==1: c_loss= SmoothL1Loss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size()) fake_label", "2, 1, output_padding=0, bias=True) ] return layers_G def weights_init(m): classname = m.__class__.__name__ if", "+ item[0] ed_c = st_c + item[0] tmp = F.cross_entropy( data[:, st:ed], torch.argmax(c[:,", "tmp / np.sum(tmp) tmp_sampling = tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] = tmp", "= optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch = max(1,", "ed def sample(self, n, col, opt): if col is None: idx = np.random.choice(np.arange(self.n),", "classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer =", "(Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss)", "1e-4).mean()) - (torch.log(1. 
- y_fake + 1e-4).mean())) loss_d.backward() optimizerD.step() noisez = torch.randn(self.batch_size, self.random_dim,", "opt1prime[i]] = 1 return vec def cond_loss(data, output_info, c, m): loss = []", "return (self.seq(input)), self.seq_info(input) class Generator(Module): def __init__(self, side, layers): super(Generator, self).__init__() self.side =", "for item in list(dis_dims): seq += [ Linear(dim, item), LeakyReLU(0.2), Dropout(0.5) ] dim", "np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec, mask, idx, opt1prime", "+ 1e-4).mean()) - (torch.log(1. - y_fake + 1e-4).mean())) loss_d.backward() optimizerD.step() noisez = torch.randn(self.batch_size,", "random_choice_prob_index(self.p[idx]) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return", "elif item[1] == 'softmax': ed = st + item[0] tmp = [] for", "in range(steps): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample(self.batch_size) c = condvec", "data_dim = self.transformer.output_dim self.cond_generator = Cond(train_data, self.transformer.output_info) sides = [4, 8, 16, 24,", "i in sides: if i * i >= col_size_d: self.dside = i break", "r = np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis) return (a.cumsum(axis=axis) > r).argmax(axis=axis) def maximum_interval(output_info): max_interval", "= torch.cat([real, c_perm], dim=1) real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ =", "Linear, Module, ReLU, Sequential, Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer", "torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake = self.generator(noisez)", "= 1 return vec, mask, idx, opt1prime def 
sample(self, batch): if self.n_col ==", "self.cond_generator = Cond(train_data, self.transformer.output_info) sides = [4, 8, 16, 24, 32] col_size_d =", "= [] st = 0 st_c = 0 for item in output_info: if", "= ImageTransformer(self.dside) steps_per_epoch = max(1, len(train_data) // self.batch_size) for i in tqdm(range(self.epochs)): for", "class Sampler(object): def __init__(self, data, output_info): super(Sampler, self).__init__() self.data = data self.model =", "= 0 counter = 0 for item in output_info: if item[1] == 'tanh':", "ed = st + item[0] tmp = [] for j in range(item[0]): tmp.append(np.nonzero(data[:,", "BCELoss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre, real_label) loss_cg =", "n // self.batch_size + 1 data = [] for i in range(steps): noisez", "= len(layers)-2 self.seq = Sequential(*layers) self.seq_info = Sequential(*layers[:info]) def forward(self, input): return (self.seq(input)),", "curr[0], 4, 2, 1, output_padding=0, bias=True) ] return layers_G def weights_init(m): classname =", "= [] for c, o in zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class Discriminator(Module):", "classifier=None optimizerC= None if target_index != None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC", "data_dim + self.cond_generator.n_opt for i in sides: if i * i >= col_size_d:", "sample(self, n): self.generator.eval() output_info = self.transformer.output_info steps = n // self.batch_size + 1", "torch.utils.data import torch.optim as optim from torch.optim import Adam from torch.nn import functional", "sides = [4, 8, 16, 24, 32] col_size_d = data_dim + self.cond_generator.n_opt for", "train_data=pd.DataFrame, categorical=[], mixed={}, type={}): problem_type = None target_index=None if type: problem_type = list(type.keys())[0]", "= data self.model = [] self.n = len(data) st = 0 
for item", "= np.asarray(self.interval) def sample_train(self, batch): if self.n_col == 0: return None batch =", "output_info = self.transformer.output_info steps = n // self.batch_size + 1 data = []", "1e-4).mean())) loss_d.backward() optimizerD.step() noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m,", "fakeact = apply_activate(faket, self.transformer.output_info) real_pre, real_label = classifier(real) fake_pre, fake_label = classifier(fakeact) c_loss", "torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - torch.std(info_real.view(self.batch_size,-1), dim=0),", "if i * i >= col_size_g: self.gside = i break layers_G = determine_layers_gen(self.gside,", "self.n_col += 1 st = ed self.interval = np.asarray(self.interval) def sample_train(self, batch): if", "sample_train(self, batch): if self.n_col == 0: return None batch = batch idx =", "batch idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx)", "= Generator(self.gside, layers_G).to(self.device) discriminator = Discriminator(self.dside, layers_D).to(self.device) optimizer_params = dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3,", "item[0] c+=1 tc+=1 ed= st+output_info[tc][0] return (st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list = [] for", "torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info): st = 0 c= 0 tc= 0 for item", "train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed) self.transformer.fit() train_data = self.transformer.transform(train_data.values) data_sampler = Sampler(train_data,", "layers_G = 
determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D = determine_layers_disc(self.dside, self.num_channels) self.generator = Generator(self.gside, layers_G).to(self.device)", "functional as F from torch.nn import (Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Conv2d,", "j in range(item[0]): tmp.append(np.nonzero(data[:, st + j])[0]) self.model.append(tmp) st = ed def sample(self,", "in output_info: if item[1] == 'tanh': ed = st + item[0] data_t.append(torch.tanh(data[:, st:ed]))", "= ed self.interval = [] self.n_col = 0 self.n_opt = 0 st =", "optimizerG.zero_grad() fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat =", "break if item[1]=='tanh': st += item[0] elif item[1] == 'softmax': st += item[0]", "= noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm =", "faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) real_cat", "__init__(self, data, output_info): self.model = [] st = 0 counter = 0 for", "0.02) elif classname.find('BatchNorm') != -1: init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data, 0) class CTABGANSynthesizer: def", "class_dim=(256, 256, 256, 256), random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500, epochs=1): self.random_dim = random_dim self.class_dim", "= data_dim for i in sides: if i * i >= col_size_g: self.gside", "print() layers_D += [ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1, 0), Sigmoid() ] return layers_D", "else: return self.seq(new_imp), label def apply_activate(data, output_info): data_t = [] st = 0", "fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy()) data = 
np.concatenate(data, axis=0) result = self.transformer.inverse_transform(data) return result[0:n]", "class Discriminator(Module): def __init__(self, side, layers): super(Discriminator, self).__init__() self.side = side info =", "noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size) np.random.shuffle(perm) real", "torch.cat([fakeact, c], dim=1) fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat) cross_entropy = cond_loss(faket, self.transformer.output_info,", "LeakyReLU(0.2, inplace=True) ] print() layers_D += [ Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1, 0), Sigmoid()", "def __init__(self, data, output_info): self.model = [] st = 0 counter = 0", "side), (num_channels, side // 2)] while layer_dims[-1][1] > 3 and len(layer_dims) < 4:", "dim=0) - torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - torch.std(info_real.view(self.batch_size,-1), dim=0), 1)", "ed return torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info): st = 0 c= 0 tc= 0", "0 for item in output_info: if c==target_col_index: break if item[1]=='tanh': st += item[0]", "+ 1) tmp = tmp / np.sum(tmp) tmp_sampling = tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling)", "(num_channels, side // 2)] while layer_dims[-1][1] > 3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0]", "dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(), **optimizer_params) optimizerD = Adam(discriminator.parameters(), **optimizer_params)", "self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d) loss_d =", "c = condvec c = torch.from_numpy(c).to(self.device) noisez = 
torch.cat([noisez, c], dim=1) noisez =", "[] for i in col_idx: pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape) def", "Conv2d(layer_dims[-1][0], 1, layer_dims[-1][1], 1, 0), Sigmoid() ] return layers_D def determine_layers_gen(side, random_dim, num_channels):", "discriminator(fake_cat) cross_entropy = cond_loss(faket, self.transformer.output_info, c, m) _,info_real = discriminator(real_cat_d) g = -(torch.log(y_fake", "pandas as pd import torch import torch.utils.data import torch.optim as optim from torch.optim", "= self.transformer.output_info steps = n // self.batch_size + 1 data = [] for", "[Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2: seq += [Linear(dim, 1),Sigmoid()] else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq", "ed = st + item[0] data_t.append(torch.tanh(data[:, st:ed])) st = ed elif item[1] ==", "dis_dims,st_ed): super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0]) seq = [] self.str_end = st_ed for item", "= cond_loss(faket, self.transformer.output_info, c, m) _,info_real = discriminator(real_cat_d) g = -(torch.log(y_fake + 1e-4).mean())", "if torch.cuda.is_available() else \"cpu\") def fit(self, train_data=pd.DataFrame, categorical=[], mixed={}, type={}): problem_type = None", "self).__init__() self.side = side info = len(layers)-2 self.seq = Sequential(*layers) self.seq_info = Sequential(*layers[:info])", "layers_D = determine_layers_disc(self.dside, self.num_channels) self.generator = Generator(self.gside, layers_G).to(self.device) discriminator = Discriminator(self.dside, layers_D).to(self.device) optimizer_params", "self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed) self.transformer.fit() train_data = self.transformer.transform(train_data.values) data_sampler = Sampler(train_data, self.transformer.output_info)", "def sample(self, batch): if self.n_col == 0: return None batch = batch 
idx", "- (torch.log(1. - y_fake + 1e-4).mean())) loss_d.backward() optimizerD.step() noisez = torch.randn(self.batch_size, self.random_dim, device=self.device)", "type={}): problem_type = None target_index=None if type: problem_type = list(type.keys())[0] if problem_type: target_index", "seq = [] self.str_end = st_ed for item in list(dis_dims): seq += [", "ed_c loss = torch.stack(loss, dim=1) return (loss * m).sum() / data.size()[0] class Sampler(object):", "= max(max_interval, item[0]) return max_interval class Cond(object): def __init__(self, data, output_info): self.model =", "input[:, self.str_end[0]:self.str_end[1]] else: label = torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2)", "item if (st_ed[1]-st_ed[0])==1: seq += [Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2: seq += [Linear(dim, 1),Sigmoid()]", "self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d) loss_d = (-(torch.log(y_real + 1e-4).mean())", "for i in tqdm(range(self.epochs)): for _ in range(steps_per_epoch): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device)", "[ ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1], 1, 0, output_padding=0, bias=False) ] for prev, curr", "discriminator.apply(weights_init) self.Gtransformer = ImageTransformer(self.gside) self.Dtransformer = ImageTransformer(self.dside) steps_per_epoch = max(1, len(train_data) // self.batch_size)", "self.transformer.output_info) real_pre, real_label = classifier(real) fake_pre, fake_label = classifier(fakeact) c_loss = CrossEntropyLoss() if", "= Sequential(*layers) def forward(self, input_): return self.seq(input_) def determine_layers_disc(side, num_channels): assert side >=", "1) loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - 
torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info = loss_mean +", "= [] for i in col_idx: pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape)", "c, m) _,info_real = discriminator(real_cat_d) g = -(torch.log(y_fake + 1e-4).mean()) + cross_entropy g.backward(retain_graph=True)", "idx = np.random.choice(np.arange(self.n), n) return self.data[idx] idx = [] for c, o in", "tmp.append(np.nonzero(data[:, st + j])[0]) self.model.append(tmp) st = ed def sample(self, n, col, opt):", "= [] for j in range(item[0]): tmp.append(np.nonzero(data[:, st + j])[0]) self.model.append(tmp) st =", "output_padding=0, bias=False) ] for prev, curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])): layers_G += [ BatchNorm2d(prev[0]),", "(-(torch.log(y_real + 1e-4).mean()) - (torch.log(1. - y_fake + 1e-4).mean())) loss_d.backward() optimizerD.step() noisez =", "super(Sampler, self).__init__() self.data = data self.model = [] self.n = len(data) st =", "== 'softmax': ed = st + item[0] counter += 1 self.model.append(np.argmax(data[:, st:ed], axis=-1))", "Sequential, Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import ImageTransformer,DataTransformer from", "init.normal_(m.weight.data, 0.0, 0.02) elif classname.find('BatchNorm') != -1: init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data, 0) class", "self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D = determine_layers_disc(self.dside, self.num_channels) self.generator = Generator(self.gside, layers_G).to(self.device) discriminator = Discriminator(self.dside,", "self.gside = None self.l2scale = l2scale self.batch_size = batch_size self.epochs = epochs self.device", "vec = np.zeros((batch, self.n_opt), dtype='float32') opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx) for i in np.arange(batch): vec[i,", 
"Sampler(object): def __init__(self, data, output_info): super(Sampler, self).__init__() self.data = data self.model = []", "dim=0) - torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info = loss_mean + loss_std loss_info.backward() optimizerG.step() if", "= c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def sample(self, n): self.generator.eval()", "ReLU, Sequential, Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import ImageTransformer,DataTransformer", "m): loss = [] st = 0 st_c = 0 for item in", "+= 1 self.model.append(np.argmax(data[:, st:ed], axis=-1)) st = ed self.interval = [] self.n_col =", "np.sum(data[:, st:ed], axis=0) tmp = np.log(tmp + 1) tmp = tmp / np.sum(tmp)", "== 'tanh': ed = st + item[0] data_t.append(torch.tanh(data[:, st:ed])) st = ed elif", "fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) real_cat = torch.cat([real, c_perm],", "noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake = self.generator(noisez) faket", "= tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col, :item[0]] = tmp self.interval.append((self.n_opt, item[0])) self.n_opt +=", "= self.transformer.output_dim self.cond_generator = Cond(train_data, self.transformer.output_info) sides = [4, 8, 16, 24, 32]", "layer_dims[1:]): layers_D += [ Conv2d(prev[0], curr[0], 4, 2, 1, bias=False), BatchNorm2d(curr[0]), LeakyReLU(0.2, inplace=True)", "bias=True) ] return layers_G def weights_init(m): classname = m.__class__.__name__ if classname.find('Conv') != -1:", "classname.find('BatchNorm') != -1: init.normal_(m.weight.data, 1.0, 0.02) init.constant_(m.bias.data, 0) class CTABGANSynthesizer: def __init__(self, 
class_dim=(256,", "elif item[1] == 'softmax': st += item[0] c+=1 tc+=1 ed= st+output_info[tc][0] return (st,ed)", "classifier(real) fake_pre, fake_label = classifier(fakeact) c_loss = CrossEntropyLoss() if (st_ed[1] - st_ed[0])==1: c_loss=", "self.side = side self.seq = Sequential(*layers) def forward(self, input_): return self.seq(input_) def determine_layers_disc(side,", "self.gside = i break layers_G = determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D = determine_layers_disc(self.dside, self.num_channels)", "[4, 8, 16, 24, 32] col_size_d = data_dim + self.cond_generator.n_opt for i in", "0), Sigmoid() ] return layers_D def determine_layers_gen(side, random_dim, num_channels): assert side >= 4", "apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) real_cat = torch.cat([real, c_perm], dim=1) real_cat_d", "1 return vec def cond_loss(data, output_info, c, m): loss = [] st =", "in sides: if i * i >= col_size_d: self.dside = i break sides", "4 and side <= 32 layer_dims = [(1, side), (num_channels, side // 2)]", "elif item[1] == 'softmax': ed = st + item[0] counter += 1 self.model.append(np.argmax(data[:,", "optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def sample(self, n): self.generator.eval() output_info = self.transformer.output_info steps =", "layers): super(Generator, self).__init__() self.side = side self.seq = Sequential(*layers) def forward(self, input_): return", "st + item[0] tmp = [] for j in range(item[0]): tmp.append(np.nonzero(data[:, st +", "= Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dim self.cond_generator = Cond(train_data, self.transformer.output_info) sides = [4,", "= np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm = c[perm] real =", "__init__(self, side, layers): super(Generator, self).__init__() 
self.side = side self.seq = Sequential(*layers) def forward(self,", "32 layer_dims = [(1, side), (num_channels, side // 2)] while layer_dims[-1][1] > 3", "self.num_channels) layers_D = determine_layers_disc(self.dside, self.num_channels) self.generator = Generator(self.gside, layers_G).to(self.device) discriminator = Discriminator(self.dside, layers_D).to(self.device)", "dim=1) real_cat = torch.cat([real, c_perm], dim=1) real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad()", "> 3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_D", "+= [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq) def forward(self, input): label=None if (self.str_end[1]-self.str_end[0])==1: label =", "forward(self, input): label=None if (self.str_end[1]-self.str_end[0])==1: label = input[:, self.str_end[0]:self.str_end[1]] else: label = torch.argmax(input[:,", "self.transformer.transform(train_data.values) data_sampler = Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dim self.cond_generator = Cond(train_data, self.transformer.output_info) sides", "1, layer_dims[-1][1], 1, 0), Sigmoid() ] return layers_D def determine_layers_gen(side, random_dim, num_channels): assert", "+= [ Linear(dim, item), LeakyReLU(0.2), Dropout(0.5) ] dim = item if (st_ed[1]-st_ed[0])==1: seq", "c, m, col, opt = condvec c = torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez", "dim=0), 1) loss_std = torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info = loss_mean", "dim=1) real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d) y_fake,_ =", "max_interval = 0 for item in output_info: max_interval = max(max_interval, item[0]) return 
max_interval", "2)) layers_G = [ ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1], 1, 0, output_padding=0, bias=False) ]", "batch idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32') mask = np.zeros((batch,", "for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec", "if i * i >= col_size_d: self.dside = i break sides = [4,", "seq += [Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2: seq += [Linear(dim, 1),Sigmoid()] else: seq +=", "condvec c = torch.from_numpy(c).to(self.device) m = torch.from_numpy(m).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez", "st:ed], tau=0.2)) st = ed return torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info): st = 0", "+= item[0] continue elif item[1] == 'softmax': ed = st + item[0] counter", "= fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre, real_label) loss_cg = c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step()", "= 0 st = 0 self.p = np.zeros((counter, maximum_interval(output_info))) self.p_sampling = [] for", "c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def sample(self, n): self.generator.eval() output_info", "np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm], opt[perm]) c_perm = c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device)", "st = ed self.interval = [] self.n_col = 0 self.n_opt = 0 st", "discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d) loss_d = (-(torch.log(y_real + 1e-4).mean()) - (torch.log(1. 
- y_fake", "random_choice_prob_index_sampling(self.p_sampling,idx) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1 return", "for c, o in zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class Discriminator(Module): def __init__(self,", "= Adam(self.generator.parameters(), **optimizer_params) optimizerD = Adam(discriminator.parameters(), **optimizer_params) st_ed = None classifier=None optimizerC= None", "input): return (self.seq(input)), self.seq_info(input) class Generator(Module): def __init__(self, side, layers): super(Generator, self).__init__() self.side", "opt1prime def sample(self, batch): if self.n_col == 0: return None batch = batch", "side // 2)] while layer_dims[-1][1] > 3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] *", "target_index != None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device) optimizerC = optim.Adam(classifier.parameters(),**optimizer_params) self.generator.apply(weights_init) discriminator.apply(weights_init)", "ed self.interval = [] self.n_col = 0 self.n_opt = 0 st = 0", "elif item[1] == 'softmax': ed = st + item[0] tmp = np.sum(data[:, st:ed],", "(st_ed[1]-st_ed[0])==1: seq += [Linear(dim, 1)] elif (st_ed[1]-st_ed[0])==2: seq += [Linear(dim, 1),Sigmoid()] else: seq", "tmp = [] for j in range(item[0]): tmp.append(np.nonzero(data[:, st + j])[0]) self.model.append(tmp) st", "def __init__(self, side, layers): super(Discriminator, self).__init__() self.side = side info = len(layers)-2 self.seq", "np.zeros((batch, self.n_col), dtype='float32') mask[np.arange(batch), idx] = 1 opt1prime = random_choice_prob_index(self.p[idx]) for i in", "c_perm], dim=1) real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d) y_fake,_", "item in list(dis_dims): seq += [ Linear(dim, item), 
LeakyReLU(0.2), Dropout(0.5) ] dim =", "= loss_mean + loss_std loss_info.backward() optimizerG.step() if problem_type: fake = self.generator(noisez) faket =", "st + item[0] tmp = np.sum(data[:, st:ed], axis=0) tmp_sampling = np.sum(data[:, st:ed], axis=0)", "i in col_idx: pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1):", "train_data = self.transformer.transform(train_data.values) data_sampler = Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dim self.cond_generator = Cond(train_data,", "= train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed) self.transformer.fit() train_data = self.transformer.transform(train_data.values) data_sampler =", "layer_dims[-1][0], layer_dims[-1][1], 1, 0, output_padding=0, bias=False) ] for prev, curr in zip(reversed(layer_dims), reversed(layer_dims[:-1])):", "while layer_dims[-1][1] > 3 and len(layer_dims) < 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] //", "Cond(train_data, self.transformer.output_info) sides = [4, 8, 16, 24, 32] col_size_d = data_dim +", "Conv2d, ConvTranspose2d, BatchNorm2d, Sigmoid, init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import ImageTransformer,DataTransformer from tqdm", "= random_choice_prob_index(self.p[idx]) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] + opt1prime[i]] = 1", "self.generator = Generator(self.gside, layers_G).to(self.device) discriminator = Discriminator(self.dside, layers_D).to(self.device) optimizer_params = dict(lr=2e-4, betas=(0.5, 0.9),", "for _ in range(steps_per_epoch): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c,", "torch.nn import functional as F from torch.nn import 
(Dropout, LeakyReLU, Linear, Module, ReLU,", "None batch = batch idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt), dtype='float32')", "super(Generator, self).__init__() self.side = side self.seq = Sequential(*layers) def forward(self, input_): return self.seq(input_)", "= np.zeros((batch, self.n_opt), dtype='float32') opt1prime = random_choice_prob_index_sampling(self.p_sampling,idx) for i in np.arange(batch): vec[i, self.interval[idx[i],", "__init__(self, data, output_info): super(Sampler, self).__init__() self.data = data self.model = [] self.n =", "return (loss * m).sum() / data.size()[0] class Sampler(object): def __init__(self, data, output_info): super(Sampler,", "torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none') loss.append(tmp) st = ed st_c = ed_c loss =", "+ item[0] tmp = F.cross_entropy( data[:, st:ed], torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none') loss.append(tmp) st", "label = input[:, self.str_end[0]:self.str_end[1]] else: label = torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1)", "st_ed = None classifier=None optimizerC= None if target_index != None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier", "output_info): self.model = [] st = 0 counter = 0 for item in", "= st + item[0] ed_c = st_c + item[0] tmp = F.cross_entropy( data[:,", "axis=1): r = np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis) return (a.cumsum(axis=axis) > r).argmax(axis=axis) def maximum_interval(output_info):", "data.size()[0] class Sampler(object): def __init__(self, data, output_info): super(Sampler, self).__init__() self.data = data self.model", "range(item[0]): tmp.append(np.nonzero(data[:, st + j])[0]) self.model.append(tmp) st = ed def sample(self, n, col,", "= [] self.str_end = st_ed for item in list(dis_dims): seq += [ Linear(dim,", "self.str_end[0]:self.str_end[1]] else: label = 
torch.argmax(input[:, self.str_end[0]:self.str_end[1]], axis=-1) new_imp = torch.cat((input[:,:self.str_end[0]],input[:,self.str_end[1]:]),1) if ((self.str_end[1]-self.str_end[0])==2) |", "l2scale self.batch_size = batch_size self.epochs = epochs self.device = torch.device(\"cuda:0\" if torch.cuda.is_available() else", "**optimizer_params) optimizerD = Adam(discriminator.parameters(), **optimizer_params) st_ed = None classifier=None optimizerC= None if target_index", "self.n_opt = 0 st = 0 self.p = np.zeros((counter, maximum_interval(output_info))) self.p_sampling = []", "cond_loss(data, output_info, c, m): loss = [] st = 0 st_c = 0", "model.synthesizer.transformer import ImageTransformer,DataTransformer from tqdm import tqdm class Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__()", "i break sides = [4, 8, 16, 24, 32] col_size_g = data_dim for", "self.batch_size) for i in tqdm(range(self.epochs)): for _ in range(steps_per_epoch): noisez = torch.randn(self.batch_size, self.random_dim,", "st += item[0] c+=1 tc+=1 ed= st+output_info[tc][0] return (st,ed) def random_choice_prob_index_sampling(probs,col_idx): option_list =", "st_c + item[0] tmp = F.cross_entropy( data[:, st:ed], torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none') loss.append(tmp)", "st_c:ed_c], dim=1), reduction='none') loss.append(tmp) st = ed st_c = ed_c loss = torch.stack(loss,", "random_choice_prob_index(a, axis=1): r = np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis) return (a.cumsum(axis=axis) > r).argmax(axis=axis) def", "self.interval[idx[i], 0] + opt1prime[i]] = 1 return vec, mask, idx, opt1prime def sample(self,", "c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact =", "r).argmax(axis=axis) def maximum_interval(output_info): max_interval = 0 for item in output_info: max_interval = 
max(max_interval,", "ed = st + item[0] ed_c = st_c + item[0] tmp = F.cross_entropy(", "32] col_size_d = data_dim + self.cond_generator.n_opt for i in sides: if i *", "def sample(self, n, col, opt): if col is None: idx = np.random.choice(np.arange(self.n), n)", "torch.cat([real, c_perm], dim=1) real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d = self.Dtransformer.transform(fake_cat) optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d)", "[4, 8, 16, 24, 32] col_size_g = data_dim for i in sides: if", "= torch.cat([fakeact, c], dim=1) real_cat = torch.cat([real, c_perm], dim=1) real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d", "epochs=1): self.random_dim = random_dim self.class_dim = class_dim self.num_channels = num_channels self.dside = None", "def random_choice_prob_index(a, axis=1): r = np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis) return (a.cumsum(axis=axis) > r).argmax(axis=axis)", "= condvec c = torch.from_numpy(c).to(self.device) noisez = torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1)", "= Discriminator(self.dside, layers_D).to(self.device) optimizer_params = dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(),", "st = ed self.interval = np.asarray(self.interval) def sample_train(self, batch): if self.n_col == 0:", "0 st_c = 0 for item in output_info: if item[1] == 'tanh': st", "st:ed])) st = ed elif item[1] == 'softmax': ed = st + item[0]", "assert side >= 4 and side <= 32 layer_dims = [(1, side), (num_channels,", "0 st = 0 self.p = np.zeros((counter, maximum_interval(output_info))) self.p_sampling = [] for item", "output_info: if c==target_col_index: break if item[1]=='tanh': st += item[0] elif item[1] == 'softmax':", "device=self.device) condvec = self.cond_generator.sample(self.batch_size) c = condvec c = torch.from_numpy(c).to(self.device) noisez = torch.cat([noisez,", "= 
Sequential(*layers[:info]) def forward(self, input): return (self.seq(input)), self.seq_info(input) class Generator(Module): def __init__(self, side,", "self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) fake_cat = self.Dtransformer.transform(fake_cat)", "return None batch = batch idx = np.random.choice(np.arange(self.n_col), batch) vec = np.zeros((batch, self.n_opt),", "tau=0.2)) st = ed return torch.cat(data_t, dim=1) def get_st_ed(target_col_index,output_info): st = 0 c=", "= CrossEntropyLoss() if (st_ed[1] - st_ed[0])==1: c_loss= SmoothL1Loss() real_label = real_label.type_as(real_pre) fake_label =", "fake_label = fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre, real_label) loss_cg = c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward()", "import ImageTransformer,DataTransformer from tqdm import tqdm class Classifier(Module): def __init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim", "= st_c + item[0] tmp = F.cross_entropy( data[:, st:ed], torch.argmax(c[:, st_c:ed_c], dim=1), reduction='none')", "in range(item[0]): tmp.append(np.nonzero(data[:, st + j])[0]) self.model.append(tmp) st = ed def sample(self, n,", "item[1]=='tanh': st += item[0] elif item[1] == 'softmax': st += item[0] c+=1 tc+=1", "= discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d) loss_d = (-(torch.log(y_real + 1e-4).mean()) - (torch.log(1. 
-", "faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket,output_info) data.append(fakeact.detach().cpu().numpy()) data = np.concatenate(data, axis=0) result =", "c_perm = c[perm] real = torch.from_numpy(real.astype('float32')).to(self.device) fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact", "range(steps_per_epoch): noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m, col, opt", "optimizerD.zero_grad() y_real,_ = discriminator(real_cat_d) y_fake,_ = discriminator(fake_cat_d) loss_d = (-(torch.log(y_real + 1e-4).mean()) -", "random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500, epochs=1): self.random_dim = random_dim self.class_dim = class_dim self.num_channels =", "self.l2scale = l2scale self.batch_size = batch_size self.epochs = epochs self.device = torch.device(\"cuda:0\" if", "side <= 32 layer_dims = [(1, side), (num_channels, side // 2)] while layer_dims[-1][1]", "st += item[0] elif item[1] == 'softmax': st += item[0] c+=1 tc+=1 ed=", "= dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(), **optimizer_params) optimizerD = Adam(discriminator.parameters(),", "1) loss_info = loss_mean + loss_std loss_info.backward() optimizerG.step() if problem_type: fake = self.generator(noisez)", ">= col_size_d: self.dside = i break sides = [4, 8, 16, 24, 32]", "dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm], opt[perm])", "init, BCELoss, CrossEntropyLoss,SmoothL1Loss) from model.synthesizer.transformer import ImageTransformer,DataTransformer from tqdm import tqdm class Classifier(Module):", "- torch.mean(info_real.view(self.batch_size,-1), dim=0), 1) loss_std = 
torch.norm(torch.std(info_fake.view(self.batch_size,-1), dim=0) - torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info", "if col is None: idx = np.random.choice(np.arange(self.n), n) return self.data[idx] idx = []", "= np.zeros((batch, self.n_col), dtype='float32') mask[np.arange(batch), idx] = 1 opt1prime = random_choice_prob_index(self.p[idx]) for i", "= self.transformer.transform(train_data.values) data_sampler = Sampler(train_data, self.transformer.output_info) data_dim = self.transformer.output_dim self.cond_generator = Cond(train_data, self.transformer.output_info)", "- torch.std(info_real.view(self.batch_size,-1), dim=0), 1) loss_info = loss_mean + loss_std loss_info.backward() optimizerG.step() if problem_type:", "256, 256), random_dim=100, num_channels=64, l2scale=1e-5, batch_size=500, epochs=1): self.random_dim = random_dim self.class_dim = class_dim", "elif (st_ed[1]-st_ed[0])==2: seq += [Linear(dim, 1),Sigmoid()] else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq)", "apply_activate(data, output_info): data_t = [] st = 0 for item in output_info: if", "1 st = ed self.interval = np.asarray(self.interval) def sample_train(self, batch): if self.n_col ==", "st_ed[0])==2: c_loss = BCELoss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre,", "as F from torch.nn import (Dropout, LeakyReLU, Linear, Module, ReLU, Sequential, Conv2d, ConvTranspose2d,", "c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size, col[perm],", "LeakyReLU(0.2), Dropout(0.5) ] dim = item if (st_ed[1]-st_ed[0])==1: seq += [Linear(dim, 1)] elif", "= fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size()) elif (st_ed[1] - st_ed[0])==2: c_loss", "None classifier=None 
optimizerC= None if target_index != None: st_ed= get_st_ed(target_index,self.transformer.output_info) classifier = Classifier(data_dim,self.class_dim,st_ed).to(self.device)", "1)] elif (st_ed[1]-st_ed[0])==2: seq += [Linear(dim, 1),Sigmoid()] else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq =", "[Linear(dim, 1),Sigmoid()] else: seq += [Linear(dim,(st_ed[1]-st_ed[0]))] self.seq = Sequential(*seq) def forward(self, input): label=None", "= self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact = apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c],", "self.n = len(data) st = 0 for item in output_info: if item[1] ==", "maximum_interval(output_info))) self.p_sampling = [] for item in output_info: if item[1] == 'tanh': st", "max(1, len(train_data) // self.batch_size) for i in tqdm(range(self.epochs)): for _ in range(steps_per_epoch): noisez", "torch.optim import Adam from torch.nn import functional as F from torch.nn import (Dropout,", "random_choice_prob_index_sampling(probs,col_idx): option_list = [] for i in col_idx: pp = probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp))", "len(train_data) // self.batch_size) for i in tqdm(range(self.epochs)): for _ in range(steps_per_epoch): noisez =", "torch.cat([fakeact, c], dim=1) real_cat = torch.cat([real, c_perm], dim=1) real_cat_d = self.Dtransformer.transform(real_cat) fake_cat_d =", "def forward(self, input): return (self.seq(input)), self.seq_info(input) class Generator(Module): def __init__(self, side, layers): super(Generator,", "0 c= 0 tc= 0 for item in output_info: if c==target_col_index: break if", "steps_per_epoch = max(1, len(train_data) // self.batch_size) for i in tqdm(range(self.epochs)): for _ in", "= torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) optimizerG.zero_grad() fake = self.generator(noisez) faket =", "1) tmp = tmp / np.sum(tmp) tmp_sampling = 
tmp_sampling / np.sum(tmp_sampling) self.p_sampling.append(tmp_sampling) self.p[self.n_col,", "self.data = data self.model = [] self.n = len(data) st = 0 for", "mask = np.zeros((batch, self.n_col), dtype='float32') mask[np.arange(batch), idx] = 1 opt1prime = random_choice_prob_index(self.p[idx]) for", "vec def cond_loss(data, output_info, c, m): loss = [] st = 0 st_c", "[] st = 0 for item in output_info: if item[1] == 'tanh': ed", "0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(), **optimizer_params) optimizerD = Adam(discriminator.parameters(), **optimizer_params) st_ed =", "self.model = [] st = 0 counter = 0 for item in output_info:", "self.n_opt += item[0] self.n_col += 1 st = ed self.interval = np.asarray(self.interval) def", "layers_D def determine_layers_gen(side, random_dim, num_channels): assert side >= 4 and side <= 32", "self.side = side info = len(layers)-2 self.seq = Sequential(*layers) self.seq_info = Sequential(*layers[:info]) def", "< 4: layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_G = [ ConvTranspose2d( random_dim,", "num_channels=64, l2scale=1e-5, batch_size=500, epochs=1): self.random_dim = random_dim self.class_dim = class_dim self.num_channels = num_channels", "== 'softmax': ed = st + item[0] tmp = [] for j in", "real_label) loss_cg = c_loss(fake_pre, fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def sample(self,", "st:ed], axis=0) tmp = np.log(tmp + 1) tmp = tmp / np.sum(tmp) tmp_sampling", "= i break layers_G = determine_layers_gen(self.gside, self.random_dim+self.cond_generator.n_opt, self.num_channels) layers_D = determine_layers_disc(self.dside, self.num_channels) self.generator", "self.str_end = st_ed for item in list(dis_dims): seq += [ Linear(dim, item), LeakyReLU(0.2),", "= self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat) cross_entropy = 
cond_loss(faket, self.transformer.output_info, c, m) _,info_real =", "layer_dims[-1][1] // 2)) layers_G = [ ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1], 1, 0, output_padding=0,", "__init__(self,input_dim, dis_dims,st_ed): super(Classifier,self).__init__() dim = input_dim-(st_ed[1]-st_ed[0]) seq = [] self.str_end = st_ed for", "optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def sample(self, n): self.generator.eval() output_info = self.transformer.output_info", "_,info_real = discriminator(real_cat_d) g = -(torch.log(y_fake + 1e-4).mean()) + cross_entropy g.backward(retain_graph=True) loss_mean =", "problem_type: target_index = train_data.columns.get_loc(type[problem_type]) self.transformer = DataTransformer(train_data=train_data, categorical_list=categorical, mixed_dict=mixed) self.transformer.fit() train_data = self.transformer.transform(train_data.values)", "optimizer_params = dict(lr=2e-4, betas=(0.5, 0.9), eps=1e-3, weight_decay=self.l2scale) optimizerG = Adam(self.generator.parameters(), **optimizer_params) optimizerD =", "list(dis_dims): seq += [ Linear(dim, item), LeakyReLU(0.2), Dropout(0.5) ] dim = item if", "loss_std loss_info.backward() optimizerG.step() if problem_type: fake = self.generator(noisez) faket = self.Gtransformer.inverse_transform(fake) fakeact =", "init.constant_(m.bias.data, 0) class CTABGANSynthesizer: def __init__(self, class_dim=(256, 256, 256, 256), random_dim=100, num_channels=64, l2scale=1e-5,", "c_loss = BCELoss() real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) loss_cc = c_loss(real_pre, real_label)", "| ((self.str_end[1]-self.str_end[0])==1): return self.seq(new_imp).view(-1), label else: return self.seq(new_imp), label def apply_activate(data, output_info): data_t", "return self.seq(new_imp).view(-1), label else: return self.seq(new_imp), label def apply_activate(data, output_info): data_t = []", "= 1 
opt1prime = random_choice_prob_index(self.p[idx]) for i in np.arange(batch): vec[i, self.interval[idx[i], 0] +", "y_fake + 1e-4).mean())) loss_d.backward() optimizerD.step() noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size)", "loss_cc.backward() optimizerC.step() def sample(self, n): self.generator.eval() output_info = self.transformer.output_info steps = n //", "24, 32] col_size_d = data_dim + self.cond_generator.n_opt for i in sides: if i", "item[0] tmp = [] for j in range(item[0]): tmp.append(np.nonzero(data[:, st + j])[0]) self.model.append(tmp)", "in zip(col, opt): idx.append(np.random.choice(self.model[c][o])) return self.data[idx] class Discriminator(Module): def __init__(self, side, layers): super(Discriminator,", "= None self.l2scale = l2scale self.batch_size = batch_size self.epochs = epochs self.device =", "= random_dim self.class_dim = class_dim self.num_channels = num_channels self.dside = None self.gside =", "+ 1e-4).mean()) + cross_entropy g.backward(retain_graph=True) loss_mean = torch.norm(torch.mean(info_fake.view(self.batch_size,-1), dim=0) - torch.mean(info_real.view(self.batch_size,-1), dim=0), 1)", "dim=1), reduction='none') loss.append(tmp) st = ed st_c = ed_c loss = torch.stack(loss, dim=1)", "torch.cat([noisez, c], dim=1) noisez = noisez.view(self.batch_size,self.random_dim+self.cond_generator.n_opt,1,1) perm = np.arange(self.batch_size) np.random.shuffle(perm) real = data_sampler.sample(self.batch_size,", "= apply_activate(faket, self.transformer.output_info) fake_cat = torch.cat([fakeact, c], dim=1) fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake =", "col_size_d: self.dside = i break sides = [4, 8, 16, 24, 32] col_size_g", "np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1): r = np.expand_dims(np.random.rand(a.shape[1 - axis]), axis=axis) return (a.cumsum(axis=axis) >", "i * i >= col_size_d: self.dside = 
i break sides = [4, 8,", "+= item[0] elif item[1] == 'softmax': st += item[0] c+=1 tc+=1 ed= st+output_info[tc][0]", "sides = [4, 8, 16, 24, 32] col_size_g = data_dim for i in", "optimizerG = Adam(self.generator.parameters(), **optimizer_params) optimizerD = Adam(discriminator.parameters(), **optimizer_params) st_ed = None classifier=None optimizerC=", "fake_label) optimizerG.zero_grad() loss_cg.backward() optimizerG.step() optimizerC.zero_grad() loss_cc.backward() optimizerC.step() def sample(self, n): self.generator.eval() output_info =", "== 'softmax': st += item[0] c+=1 tc+=1 ed= st+output_info[tc][0] return (st,ed) def random_choice_prob_index_sampling(probs,col_idx):", "real_label = real_label.type_as(real_pre) fake_label = fake_label.type_as(fake_pre) real_label = torch.reshape(real_label,real_pre.size()) fake_label = torch.reshape(fake_label,fake_pre.size()) elif", "= np.zeros((counter, maximum_interval(output_info))) self.p_sampling = [] for item in output_info: if item[1] ==", "opt1prime[i]] = 1 return vec, mask, idx, opt1prime def sample(self, batch): if self.n_col", "Discriminator(Module): def __init__(self, side, layers): super(Discriminator, self).__init__() self.side = side info = len(layers)-2", "dim=1) fake_cat = self.Dtransformer.transform(fake_cat) y_fake,info_fake = discriminator(fake_cat) cross_entropy = cond_loss(faket, self.transformer.output_info, c, m)", "noisez = torch.randn(self.batch_size, self.random_dim, device=self.device) condvec = self.cond_generator.sample_train(self.batch_size) c, m, col, opt =", "probs[i] option_list.append(np.random.choice(np.arange(len(probs[i])), p=pp)) return np.array(option_list).reshape(col_idx.shape) def random_choice_prob_index(a, axis=1): r = np.expand_dims(np.random.rand(a.shape[1 - axis]),", "col_size_g = data_dim for i in sides: if i * i >= col_size_g:", "layer_dims.append((layer_dims[-1][0] * 2, layer_dims[-1][1] // 2)) layers_G = [ ConvTranspose2d( random_dim, layer_dims[-1][0], layer_dims[-1][1]," ]
[ "\"{0:b}\".format(n) zeros = bin.count(\"0\") return pow(2, zeros) if __name__ == \"__main__\": assert sum_xor(5)", "a 1, and an addition would return a 1 as well. For example:", "x = n ^ x Solve: We count the number of zeros that", "which is binary \"1010\", if you add or XOR 1, you would end", "because for sum and xor to be equal, it occurs when there are", "xor to be equal, it occurs when there are 0s in the digit", "int: The total number of integers that satisfy the sum = xor problem", "of the integer, because for sum and xor to be equal, it occurs", "an addition would return a 1 as well. For example: for the integer", "because of the least significant 0 being flipped to a 1. We then", "n + x = n ^ x Solve: We count the number of", "if n == 0: return 1 bin = \"{0:b}\".format(n) zeros = bin.count(\"0\") return", "that: 0 <= x <= n n + x = n ^ x", "representation of the integer, because for sum and xor to be equal, it", "both, because of the least significant 0 being flipped to a 1. 
We", "<= x <= n n + x = n ^ x Solve: We", "that are in the binary representation of the integer, because for sum and", "We then return the total combinations of these values, which is 2^(number of", "sum_xor(n): \"\"\"Hackerrank Problem: https://www.hackerrank.com/challenges/sum-vs-xor/problem Given an integer n, find each x such that:", "XOR 1, you would end up with \"1011\" for both, because of the", "return 1 bin = \"{0:b}\".format(n) zeros = bin.count(\"0\") return pow(2, zeros) if __name__", "bin.count(\"0\") return pow(2, zeros) if __name__ == \"__main__\": assert sum_xor(5) == 2 assert", "of integers that satisfy the sum = xor problem \"\"\" if n ==", "zeros that are in the binary representation of the integer, because for sum", "that satisfy the sum = xor problem \"\"\" if n == 0: return", "number of zeros that are in the binary representation of the integer, because", "total combinations of these values, which is 2^(number of zeros) Args: n (int):", "n (int): Integer to check Returns: int: The total number of integers that", "xor problem \"\"\" if n == 0: return 1 bin = \"{0:b}\".format(n) zeros", "<= n n + x = n ^ x Solve: We count the", "it occurs when there are 0s in the digit where an XOR would", "= bin.count(\"0\") return pow(2, zeros) if __name__ == \"__main__\": assert sum_xor(5) == 2", "x <= n n + x = n ^ x Solve: We count", "1, and an addition would return a 1 as well. 
For example: for", "n, find each x such that: 0 <= x <= n n +", "there are 0s in the digit where an XOR would return a 1,", "n ^ x Solve: We count the number of zeros that are in", "you add or XOR 1, you would end up with \"1011\" for both,", "return pow(2, zeros) if __name__ == \"__main__\": assert sum_xor(5) == 2 assert sum_xor(10)", "count the number of zeros that are in the binary representation of the", "integer \"10\", which is binary \"1010\", if you add or XOR 1, you", "\"10\", which is binary \"1010\", if you add or XOR 1, you would", "__name__ == \"__main__\": assert sum_xor(5) == 2 assert sum_xor(10) == 4 assert sum_xor(0)", "where an XOR would return a 1, and an addition would return a", "least significant 0 being flipped to a 1. We then return the total", "be equal, it occurs when there are 0s in the digit where an", "add or XOR 1, you would end up with \"1011\" for both, because", "combinations of these values, which is 2^(number of zeros) Args: n (int): Integer", "total number of integers that satisfy the sum = xor problem \"\"\" if", "return the total combinations of these values, which is 2^(number of zeros) Args:", "satisfy the sum = xor problem \"\"\" if n == 0: return 1", "zeros) if __name__ == \"__main__\": assert sum_xor(5) == 2 assert sum_xor(10) == 4", "each x such that: 0 <= x <= n n + x =", "and xor to be equal, it occurs when there are 0s in the", "flipped to a 1. We then return the total combinations of these values,", "when there are 0s in the digit where an XOR would return a", "to a 1. We then return the total combinations of these values, which", "as well. For example: for the integer \"10\", which is binary \"1010\", if", "then return the total combinations of these values, which is 2^(number of zeros)", "2^(number of zeros) Args: n (int): Integer to check Returns: int: The total", "are in the binary representation of the integer, because for sum and xor", "would return a 1 as well. 
For example: for the integer \"10\", which", "are 0s in the digit where an XOR would return a 1, and", "XOR would return a 1, and an addition would return a 1 as", "x Solve: We count the number of zeros that are in the binary", "occurs when there are 0s in the digit where an XOR would return", "The total number of integers that satisfy the sum = xor problem \"\"\"", "the least significant 0 being flipped to a 1. We then return the", "you would end up with \"1011\" for both, because of the least significant", "find each x such that: 0 <= x <= n n + x", "(int): Integer to check Returns: int: The total number of integers that satisfy", "n == 0: return 1 bin = \"{0:b}\".format(n) zeros = bin.count(\"0\") return pow(2,", "example: for the integer \"10\", which is binary \"1010\", if you add or", "in the digit where an XOR would return a 1, and an addition", "and an addition would return a 1 as well. For example: for the", "\"\"\" if n == 0: return 1 bin = \"{0:b}\".format(n) zeros = bin.count(\"0\")", "n n + x = n ^ x Solve: We count the number", "return a 1 as well. For example: for the integer \"10\", which is", "the binary representation of the integer, because for sum and xor to be", "Solve: We count the number of zeros that are in the binary representation", "in the binary representation of the integer, because for sum and xor to", "1 as well. For example: for the integer \"10\", which is binary \"1010\",", "the total combinations of these values, which is 2^(number of zeros) Args: n", "would return a 1, and an addition would return a 1 as well.", "significant 0 being flipped to a 1. 
We then return the total combinations", "of these values, which is 2^(number of zeros) Args: n (int): Integer to", "Args: n (int): Integer to check Returns: int: The total number of integers", "\"\"\"Hackerrank Problem: https://www.hackerrank.com/challenges/sum-vs-xor/problem Given an integer n, find each x such that: 0", "if you add or XOR 1, you would end up with \"1011\" for", "being flipped to a 1. We then return the total combinations of these", "to be equal, it occurs when there are 0s in the digit where", "1, you would end up with \"1011\" for both, because of the least", "the sum = xor problem \"\"\" if n == 0: return 1 bin", "number of integers that satisfy the sum = xor problem \"\"\" if n", "x such that: 0 <= x <= n n + x = n", "For example: for the integer \"10\", which is binary \"1010\", if you add", "these values, which is 2^(number of zeros) Args: n (int): Integer to check", "integers that satisfy the sum = xor problem \"\"\" if n == 0:", "0: return 1 bin = \"{0:b}\".format(n) zeros = bin.count(\"0\") return pow(2, zeros) if", "== 0: return 1 bin = \"{0:b}\".format(n) zeros = bin.count(\"0\") return pow(2, zeros)", "binary \"1010\", if you add or XOR 1, you would end up with", "+ x = n ^ x Solve: We count the number of zeros", "1 bin = \"{0:b}\".format(n) zeros = bin.count(\"0\") return pow(2, zeros) if __name__ ==", "of zeros that are in the binary representation of the integer, because for", "== \"__main__\": assert sum_xor(5) == 2 assert sum_xor(10) == 4 assert sum_xor(0) ==", "to check Returns: int: The total number of integers that satisfy the sum", "Problem: https://www.hackerrank.com/challenges/sum-vs-xor/problem Given an integer n, find each x such that: 0 <=", "check Returns: int: The total number of integers that satisfy the sum =", "binary representation of the integer, because for sum and xor to be equal,", "problem \"\"\" if n == 0: return 1 bin = \"{0:b}\".format(n) zeros =", "\"__main__\": assert sum_xor(5) == 2 assert sum_xor(10) == 
4 assert sum_xor(0) == 1", "We count the number of zeros that are in the binary representation of", "zeros) Args: n (int): Integer to check Returns: int: The total number of", "addition would return a 1 as well. For example: for the integer \"10\",", "the number of zeros that are in the binary representation of the integer,", "with \"1011\" for both, because of the least significant 0 being flipped to", "\"1010\", if you add or XOR 1, you would end up with \"1011\"", "sum = xor problem \"\"\" if n == 0: return 1 bin =", "https://www.hackerrank.com/challenges/sum-vs-xor/problem Given an integer n, find each x such that: 0 <= x", "zeros = bin.count(\"0\") return pow(2, zeros) if __name__ == \"__main__\": assert sum_xor(5) ==", "integer, because for sum and xor to be equal, it occurs when there", "if __name__ == \"__main__\": assert sum_xor(5) == 2 assert sum_xor(10) == 4 assert", "the digit where an XOR would return a 1, and an addition would", "the integer, because for sum and xor to be equal, it occurs when", "0 <= x <= n n + x = n ^ x Solve:", "equal, it occurs when there are 0s in the digit where an XOR", "is binary \"1010\", if you add or XOR 1, you would end up", "a 1. We then return the total combinations of these values, which is", "a 1 as well. 
For example: for the integer \"10\", which is binary", "0s in the digit where an XOR would return a 1, and an", "an integer n, find each x such that: 0 <= x <= n", "for both, because of the least significant 0 being flipped to a 1.", "would end up with \"1011\" for both, because of the least significant 0", "digit where an XOR would return a 1, and an addition would return", "bin = \"{0:b}\".format(n) zeros = bin.count(\"0\") return pow(2, zeros) if __name__ == \"__main__\":", "pow(2, zeros) if __name__ == \"__main__\": assert sum_xor(5) == 2 assert sum_xor(10) ==", "def sum_xor(n): \"\"\"Hackerrank Problem: https://www.hackerrank.com/challenges/sum-vs-xor/problem Given an integer n, find each x such", "integer n, find each x such that: 0 <= x <= n n", "1. We then return the total combinations of these values, which is 2^(number", "values, which is 2^(number of zeros) Args: n (int): Integer to check Returns:", "of zeros) Args: n (int): Integer to check Returns: int: The total number", "an XOR would return a 1, and an addition would return a 1", "for sum and xor to be equal, it occurs when there are 0s", "= \"{0:b}\".format(n) zeros = bin.count(\"0\") return pow(2, zeros) if __name__ == \"__main__\": assert", "\"1011\" for both, because of the least significant 0 being flipped to a", "for the integer \"10\", which is binary \"1010\", if you add or XOR", "which is 2^(number of zeros) Args: n (int): Integer to check Returns: int:", "of the least significant 0 being flipped to a 1. We then return", "^ x Solve: We count the number of zeros that are in the", "return a 1, and an addition would return a 1 as well. 
For", "sum and xor to be equal, it occurs when there are 0s in", "such that: 0 <= x <= n n + x = n ^", "Returns: int: The total number of integers that satisfy the sum = xor", "= xor problem \"\"\" if n == 0: return 1 bin = \"{0:b}\".format(n)", "Given an integer n, find each x such that: 0 <= x <=", "= n ^ x Solve: We count the number of zeros that are", "up with \"1011\" for both, because of the least significant 0 being flipped", "is 2^(number of zeros) Args: n (int): Integer to check Returns: int: The", "well. For example: for the integer \"10\", which is binary \"1010\", if you", "0 being flipped to a 1. We then return the total combinations of", "the integer \"10\", which is binary \"1010\", if you add or XOR 1,", "end up with \"1011\" for both, because of the least significant 0 being", "Integer to check Returns: int: The total number of integers that satisfy the", "or XOR 1, you would end up with \"1011\" for both, because of" ]
[ "import path, include from backend.api.views import bulkUploadFromFile urlpatterns = [ path('bulk_upload/', bulkUploadFromFile) ]", "from django.urls import path, include from backend.api.views import bulkUploadFromFile urlpatterns = [ path('bulk_upload/',", "django.urls import path, include from backend.api.views import bulkUploadFromFile urlpatterns = [ path('bulk_upload/', bulkUploadFromFile)", "<gh_stars>0 from django.urls import path, include from backend.api.views import bulkUploadFromFile urlpatterns = [" ]
[ "doesn't it is an ad so we skip it) if entry.find(\"a\", class_=\"comments\") is", "return None def job_logic(): headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:81.0) Gecko/20100101", "gamayun.gamayun_utils import report_error from gamayun.gamayun_utils import run_gamayun_script_logic def parse_single_entry(entry): # test if this", "for entry in soup.find_all(\"div\", class_ = \"top-matter\")] if x is not None] report_result_with_maps_only(result)", "gamayun.gamayun_utils import run_gamayun_script_logic def parse_single_entry(entry): # test if this entry contains comment (if", "gamayun.gamayun_utils import report_result_with_maps_only from gamayun.gamayun_utils import report_error from gamayun.gamayun_utils import run_gamayun_script_logic def parse_single_entry(entry):", "'html.parser') result = [x for x in [parse_single_entry(entry) for entry in soup.find_all(\"div\", class_", "so we skip it) if entry.find(\"a\", class_=\"comments\") is not None: result = dict()", "entry contains comment (if it doesn't it is an ad so we skip", "{'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:81.0) Gecko/20100101 Firefox/81.0'} page = requests.get(url = \"https://old.reddit.com/r/programming/\",", "Gecko/20100101 Firefox/81.0'} page = requests.get(url = \"https://old.reddit.com/r/programming/\", headers = headers) soup = BeautifulSoup(page.content,", "entry.find(\"a\", class_=\"comments\")[\"href\"] return result else: return None def job_logic(): headers = {'User-Agent': 'Mozilla/5.0", "from gamayun.gamayun_utils import run_gamayun_script_logic def parse_single_entry(entry): # test if this entry contains comment", "Firefox/81.0'} page = requests.get(url = \"https://old.reddit.com/r/programming/\", headers = headers) soup = BeautifulSoup(page.content, 'html.parser')", "is an ad so we skip it) if entry.find(\"a\", class_=\"comments\") is not None:", "class_=\"comments\") is not None: result = dict() result[\"title\"] = entry.find(\"a\", class_=\"title\").text 
result[\"link\"] =", "= headers) soup = BeautifulSoup(page.content, 'html.parser') result = [x for x in [parse_single_entry(entry)", "it) if entry.find(\"a\", class_=\"comments\") is not None: result = dict() result[\"title\"] = entry.find(\"a\",", "None: result = dict() result[\"title\"] = entry.find(\"a\", class_=\"title\").text result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"]", "bs4 import BeautifulSoup from gamayun.gamayun_utils import report_result_with_maps_only from gamayun.gamayun_utils import report_error from gamayun.gamayun_utils", "entry in soup.find_all(\"div\", class_ = \"top-matter\")] if x is not None] report_result_with_maps_only(result) run_gamayun_script_logic(job_logic)", "is not None: result = dict() result[\"title\"] = entry.find(\"a\", class_=\"title\").text result[\"link\"] = entry.find(\"a\",", "from gamayun.gamayun_utils import report_error from gamayun.gamayun_utils import run_gamayun_script_logic def parse_single_entry(entry): # test if", "'Mozilla/5.0 (X11; Linux i686; rv:81.0) Gecko/20100101 Firefox/81.0'} page = requests.get(url = \"https://old.reddit.com/r/programming/\", headers", "requests.get(url = \"https://old.reddit.com/r/programming/\", headers = headers) soup = BeautifulSoup(page.content, 'html.parser') result = [x", "headers) soup = BeautifulSoup(page.content, 'html.parser') result = [x for x in [parse_single_entry(entry) for", "result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"] return result else: return None", "i686; rv:81.0) Gecko/20100101 Firefox/81.0'} page = requests.get(url = \"https://old.reddit.com/r/programming/\", headers = headers) soup", "class_=\"comments\")[\"href\"] return result else: return None def job_logic(): headers = {'User-Agent': 'Mozilla/5.0 (X11;", "# test if this entry contains comment (if it doesn't it is an", "return result else: return None def job_logic(): 
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux", "import BeautifulSoup from gamayun.gamayun_utils import report_result_with_maps_only from gamayun.gamayun_utils import report_error from gamayun.gamayun_utils import", "run_gamayun_script_logic def parse_single_entry(entry): # test if this entry contains comment (if it doesn't", "test if this entry contains comment (if it doesn't it is an ad", "it is an ad so we skip it) if entry.find(\"a\", class_=\"comments\") is not", "result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"] return result else: return None def job_logic(): headers =", "dict() result[\"title\"] = entry.find(\"a\", class_=\"title\").text result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"]", "BeautifulSoup from gamayun.gamayun_utils import report_result_with_maps_only from gamayun.gamayun_utils import report_error from gamayun.gamayun_utils import run_gamayun_script_logic", "entry.find(\"a\", class_=\"comments\") is not None: result = dict() result[\"title\"] = entry.find(\"a\", class_=\"title\").text result[\"link\"]", "parse_single_entry(entry): # test if this entry contains comment (if it doesn't it is", "entry.find(\"a\", class_=\"title\").text result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"] return result else:", "contains comment (if it doesn't it is an ad so we skip it)", "def job_logic(): headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:81.0) Gecko/20100101 Firefox/81.0'} page", "headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:81.0) Gecko/20100101 Firefox/81.0'} page = requests.get(url", "= BeautifulSoup(page.content, 'html.parser') result = [x for x in [parse_single_entry(entry) for entry in", "job_logic(): headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:81.0) Gecko/20100101 Firefox/81.0'} page =", 
"entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"] return result else: return None def job_logic():", "result[\"title\"] = entry.find(\"a\", class_=\"title\").text result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"] return", "None def job_logic(): headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:81.0) Gecko/20100101 Firefox/81.0'}", "= entry.find(\"a\", class_=\"comments\")[\"href\"] return result else: return None def job_logic(): headers = {'User-Agent':", "def parse_single_entry(entry): # test if this entry contains comment (if it doesn't it", "= \"https://old.reddit.com/r/programming/\", headers = headers) soup = BeautifulSoup(page.content, 'html.parser') result = [x for", "page = requests.get(url = \"https://old.reddit.com/r/programming/\", headers = headers) soup = BeautifulSoup(page.content, 'html.parser') result", "[x for x in [parse_single_entry(entry) for entry in soup.find_all(\"div\", class_ = \"top-matter\")] if", "requests import json from bs4 import BeautifulSoup from gamayun.gamayun_utils import report_result_with_maps_only from gamayun.gamayun_utils", "result else: return None def job_logic(): headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686;", "from bs4 import BeautifulSoup from gamayun.gamayun_utils import report_result_with_maps_only from gamayun.gamayun_utils import report_error from", "if entry.find(\"a\", class_=\"comments\") is not None: result = dict() result[\"title\"] = entry.find(\"a\", class_=\"title\").text", "x in [parse_single_entry(entry) for entry in soup.find_all(\"div\", class_ = \"top-matter\")] if x is", "import run_gamayun_script_logic def parse_single_entry(entry): # test if this entry contains comment (if it", "from gamayun.gamayun_utils import report_result_with_maps_only from gamayun.gamayun_utils import report_error from gamayun.gamayun_utils 
import run_gamayun_script_logic def", "[parse_single_entry(entry) for entry in soup.find_all(\"div\", class_ = \"top-matter\")] if x is not None]", "in [parse_single_entry(entry) for entry in soup.find_all(\"div\", class_ = \"top-matter\")] if x is not", "ad so we skip it) if entry.find(\"a\", class_=\"comments\") is not None: result =", "import report_result_with_maps_only from gamayun.gamayun_utils import report_error from gamayun.gamayun_utils import run_gamayun_script_logic def parse_single_entry(entry): #", "report_result_with_maps_only from gamayun.gamayun_utils import report_error from gamayun.gamayun_utils import run_gamayun_script_logic def parse_single_entry(entry): # test", "= {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:81.0) Gecko/20100101 Firefox/81.0'} page = requests.get(url =", "rv:81.0) Gecko/20100101 Firefox/81.0'} page = requests.get(url = \"https://old.reddit.com/r/programming/\", headers = headers) soup =", "(if it doesn't it is an ad so we skip it) if entry.find(\"a\",", "= requests.get(url = \"https://old.reddit.com/r/programming/\", headers = headers) soup = BeautifulSoup(page.content, 'html.parser') result =", "import report_error from gamayun.gamayun_utils import run_gamayun_script_logic def parse_single_entry(entry): # test if this entry", "skip it) if entry.find(\"a\", class_=\"comments\") is not None: result = dict() result[\"title\"] =", "json from bs4 import BeautifulSoup from gamayun.gamayun_utils import report_result_with_maps_only from gamayun.gamayun_utils import report_error", "if this entry contains comment (if it doesn't it is an ad so", "= [x for x in [parse_single_entry(entry) for entry in soup.find_all(\"div\", class_ = \"top-matter\")]", "it doesn't it is an ad so we skip it) if entry.find(\"a\", class_=\"comments\")", "not None: result = dict() result[\"title\"] = entry.find(\"a\", class_=\"title\").text result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"]", "report_error from gamayun.gamayun_utils import 
run_gamayun_script_logic def parse_single_entry(entry): # test if this entry contains", "headers = headers) soup = BeautifulSoup(page.content, 'html.parser') result = [x for x in", "= dict() result[\"title\"] = entry.find(\"a\", class_=\"title\").text result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\",", "\"https://old.reddit.com/r/programming/\", headers = headers) soup = BeautifulSoup(page.content, 'html.parser') result = [x for x", "class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"] return result else: return None def job_logic(): headers", "= entry.find(\"a\", class_=\"title\").text result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"] return result", "result = dict() result[\"title\"] = entry.find(\"a\", class_=\"title\").text result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] =", "an ad so we skip it) if entry.find(\"a\", class_=\"comments\") is not None: result", "BeautifulSoup(page.content, 'html.parser') result = [x for x in [parse_single_entry(entry) for entry in soup.find_all(\"div\",", "import json from bs4 import BeautifulSoup from gamayun.gamayun_utils import report_result_with_maps_only from gamayun.gamayun_utils import", "comment (if it doesn't it is an ad so we skip it) if", "import requests import json from bs4 import BeautifulSoup from gamayun.gamayun_utils import report_result_with_maps_only from", "we skip it) if entry.find(\"a\", class_=\"comments\") is not None: result = dict() result[\"title\"]", "this entry contains comment (if it doesn't it is an ad so we", "class_=\"title\").text result[\"link\"] = entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"] return result else: return", "Linux i686; rv:81.0) Gecko/20100101 Firefox/81.0'} 
page = requests.get(url = \"https://old.reddit.com/r/programming/\", headers = headers)", "(X11; Linux i686; rv:81.0) Gecko/20100101 Firefox/81.0'} page = requests.get(url = \"https://old.reddit.com/r/programming/\", headers =", "soup = BeautifulSoup(page.content, 'html.parser') result = [x for x in [parse_single_entry(entry) for entry", "result = [x for x in [parse_single_entry(entry) for entry in soup.find_all(\"div\", class_ =", "for x in [parse_single_entry(entry) for entry in soup.find_all(\"div\", class_ = \"top-matter\")] if x", "else: return None def job_logic(): headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:81.0)", "= entry.find(\"a\", class_=\"title\")[\"href\"] result[\"comments_link\"] = entry.find(\"a\", class_=\"comments\")[\"href\"] return result else: return None def" ]
[ "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "8667 2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506 7242 2789\", \"web\": \"candy.org\", }, follow_redirects=True, )", "b\"dulce\" in post.data def test_formulario_nueva_unidad(client, auth): from cacao_accounting.database import Unidad auth.login() response =", "\"fax\": \"+506 7242 2789\", \"web\": \"candy.org\", }, follow_redirects=True, ) assert b\"<NAME>\" in post.data", "os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\" else: pass @pytest.fixture def client(app): return app.test_client() @pytest.fixture", "@pytest.fixture def runner(app): return app.test_cli_runner() class AuthActions: def __init__(self, client): self._client = client", "\"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, ) unidad = Unidad.query.filter_by(unidad=\"test\").first() assert", "post = client.post( \"/accounts/unit/new\", data={ \"id\": \"test\", \"nombre\": \"Test Form\", \"entidad\": \"cacao\", \"correo_electronico\":", "\"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces Mundo Sabor Sociedad Anonima\", \"telefono1\": \"+506 8771 0980\",", "b\"<NAME>\" in post.data assert b\"J08100000078\" in post.data assert b\"Dulces Mundo Sabor Sociedad Anonima\"", "this file except in compliance with the License. # You may obtain a", "return app.test_client() @pytest.fixture def runner(app): return app.test_cli_runner() class AuthActions: def __init__(self, client): self._client", "\"Test Form\", \"id_fiscal\": \"Test Form\", \"id\": \"Test Form\", \"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\":", "governing permissions and # limitations under the License. 
# # Contributors: # -", "8661 2108\", \"fax\": \"+505 2273 0754\", }, follow_redirects=True, ) entidad = Entidad.query.filter_by(entidad=\"Test Form\").first()", "None post = client.post( \"/accounts/unit/new\", data={ \"id\": \"test\", \"nombre\": \"Test Form\", \"entidad\": \"cacao\",", "\"+505 8771 0980\", \"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, )", "client.post( \"/accounts/unit/new\", data={ \"id\": \"test\", \"nombre\": \"Test Form\", \"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\":", "pylint: disable=redefined-outer-name import pytest from cacao_accounting import create_app as app_factory from cacao_accounting.database import", "\"nombre\": \"Test Form\", \"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\",", "autouse=True) def app(): from cacao_accounting.config import SQLITE app = app_factory( { \"SECRET_KEY\": \"<KEY>\",", "<NAME> # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "ANY KIND, either express or implied. # See the License for the specific", "Unidad.query.filter_by(unidad=\"test\").first() assert unidad is not None assert unidad.entidad == \"cacao\" assert unidad.unidad ==", "from cacao_accounting.database import database from cacao_accounting.datos import base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True) def app():", "None assert entidad.moneda == \"NIO\" assert entidad.entidad == \"Test Form\" def test_formulario_editar_entidad(client, auth):", "in post.data assert b\"+506 7242 2789\" in post.data assert b\"candy.org\" in post.data assert", ") unidad = Unidad.query.filter_by(unidad=\"test\").first() assert unidad is not None assert unidad.entidad == \"cacao\"", "is None post = client.post( \"/accounts/entity/new\", data={ \"nombre_comercial\": \"Test Form\", \"razon_social\": \"Test Form\",", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See", "Entidad.\" in get.data post = client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\":", "\"Test Form\", \"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771", "entidad.entidad == \"Test Form\" def test_formulario_editar_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() get", "data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"}) def logout(self): return self._client.get(\"/salir\") @pytest.fixture def auth(client): return AuthActions(client)", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "\"fax\": \"+505 2273 0754\", }, ) unidad = Unidad.query.filter_by(unidad=\"test\").first() assert unidad is not", "8771 0980\", \"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, ) unidad", "License. # # Contributors: # - <NAME> # pylint: disable=redefined-outer-name import pytest from", "OF ANY KIND, either express or implied. # See the License for the", "under the License. 
# # Contributors: # - <NAME> # pylint: disable=redefined-outer-name import", "8667 2108\" in post.data assert b\"<EMAIL>\" in post.data assert b\"+506 7242 2789\" in", "7242 2789\", \"web\": \"candy.org\", }, follow_redirects=True, ) assert b\"<NAME>\" in post.data assert b\"J08100000078\"", "assert unidad is not None assert unidad.entidad == \"cacao\" assert unidad.unidad == \"test\"", "Sabor Sociedad Anonima\" in post.data assert b\"+506 8771 0980\" in post.data assert b\"+506", "data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces Mundo Sabor Sociedad Anonima\", \"telefono1\": \"+506", "def __init__(self, client): self._client = client def login(self): return self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\":", "assert b\"Editar Entidad.\" in get.data post = client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\":", "\"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, ) unidad = Unidad.query.filter_by(unidad=\"test\").first()", "client.post( \"/accounts/entity/new\", data={ \"nombre_comercial\": \"Test Form\", \"razon_social\": \"Test Form\", \"id_fiscal\": \"Test Form\", \"id\":", "Contributors: # - <NAME> # pylint: disable=redefined-outer-name import pytest from cacao_accounting import create_app", "post.data assert b\"<EMAIL>\" in post.data assert b\"+506 7242 2789\" in post.data assert b\"candy.org\"", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "assert b\"<EMAIL>\" in post.data assert b\"+506 7242 2789\" in post.data assert b\"candy.org\" in", "\"fax\": \"+505 2273 0754\", }, follow_redirects=True, ) entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad", "Mundo Sabor Sociedad Anonima\", \"telefono1\": \"+506 8771 0980\", \"telefono2\": \"+506 8667 2108\", \"correo_electronico\":", "= client.get(\"/accounts/unit/new\") assert b\"Crear Nueva Unidad de Negocios.\" 
in response.data unidad = Unidad.query.filter_by(unidad=\"Test", "cacao_accounting.config import SQLITE app = app_factory( { \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False,", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "\"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505 8661 2108\",", "\"DESKTOPMODE\": False, } ) with app.app_context(): database.drop_all() database.create_all() base_data() dev_data() app.app_context().push() yield app", "assert b\"+506 7242 2789\" in post.data assert b\"candy.org\" in post.data assert b\"dulce\" in", "in post.data assert b\"+506 8667 2108\" in post.data assert b\"<EMAIL>\" in post.data assert", "<reponame>cacao-accounting/cacao-accounting-mockup<gh_stars>1-10 # Copyright 2020 <NAME> # # Licensed under the Apache License, Version", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "Form\", \"id\": \"Test Form\", \"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\":", "the License. 
# # Contributors: # - <NAME> # pylint: disable=redefined-outer-name import pytest", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "\"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505 8661 2108\", \"fax\":", "\"id\": \"test\", \"nombre\": \"Test Form\", \"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505", "\"Dulces Mundo Sabor Sociedad Anonima\", \"telefono1\": \"+506 8771 0980\", \"telefono2\": \"+506 8667 2108\",", "\"telefono1\": \"+506 8771 0980\", \"telefono2\": \"+506 8667 2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506 7242", "in post.data assert b\"Dulces Mundo Sabor Sociedad Anonima\" in post.data assert b\"+506 8771", "entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is None post = client.post( \"/accounts/entity/new\", data={", "test_formulario_nueva_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() response = client.get(\"/accounts/entity/new\") assert b\"Crear Nueva", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "from cacao_accounting.database import Unidad auth.login() response = client.get(\"/accounts/unit/new\") assert b\"Crear Nueva Unidad de", "0980\", \"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, ) unidad =", "Nueva Unidad de Negocios.\" in response.data unidad = Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad is", "= client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces Mundo Sabor Sociedad", "assert entidad is None post = client.post( \"/accounts/entity/new\", data={ \"nombre_comercial\": \"Test Form\", \"razon_social\":", "required by applicable law or agreed to in writing, software # distributed under", "Form\", 
\"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\",", "language governing permissions and # limitations under the License. # # Contributors: #", "0754\", }, ) unidad = Unidad.query.filter_by(unidad=\"test\").first() assert unidad is not None assert unidad.entidad", "0980\", \"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, follow_redirects=True, ) entidad", "applicable law or agreed to in writing, software # distributed under the License", "b\"Crear Nueva Unidad de Negocios.\" in response.data unidad = Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad", "\"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, follow_redirects=True, ) entidad =", "= Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is not None assert entidad.moneda == \"NIO\" assert", "Nueva Entidad.\" in response.data entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is None post", "or agreed to in writing, software # distributed under the License is distributed", "CONDITIONS OF ANY KIND, either express or implied. 
# See the License for", "AuthActions: def __init__(self, client): self._client = client def login(self): return self._client.post(\"/login\", data={\"usuario\": \"cacao\",", "assert entidad is not None assert entidad.moneda == \"NIO\" assert entidad.entidad == \"Test", ") with app.app_context(): database.drop_all() database.create_all() base_data() dev_data() app.app_context().push() yield app @pytest.fixture def elimina_variable_entorno(app):", "2108\" in post.data assert b\"<EMAIL>\" in post.data assert b\"+506 7242 2789\" in post.data", "Form\", \"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505", "get.data post = client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces Mundo", "\"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, follow_redirects=True, ) entidad = Entidad.query.filter_by(entidad=\"Test", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "writing, software # distributed under the License is distributed on an \"AS IS\"", "import create_app as app_factory from cacao_accounting.database import database from cacao_accounting.datos import base_data, dev_data", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "entidad is not None assert entidad.moneda == \"NIO\" assert entidad.entidad == \"Test Form\"", "License. 
# You may obtain a copy of the License at # #", "cacao_accounting.database import Unidad auth.login() response = client.get(\"/accounts/unit/new\") assert b\"Crear Nueva Unidad de Negocios.\"", "\"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces Mundo Sabor Sociedad Anonima\", \"telefono1\": \"+506 8771 0980\", \"telefono2\":", "= Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is None post = client.post( \"/accounts/entity/new\", data={ \"nombre_comercial\":", "client): self._client = client def login(self): return self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"}) def", "{ \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True, \"WTF_CSRF_ENABLED\": False, \"DEBUG\": True,", "auth): from cacao_accounting.database import Entidad auth.login() response = client.get(\"/accounts/entity/new\") assert b\"Crear Nueva Entidad.\"", "compliance with the License. 
# You may obtain a copy of the License", "= client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\" in get.data post = client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\":", "8771 0980\", \"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, follow_redirects=True, )", "disable=redefined-outer-name import pytest from cacao_accounting import create_app as app_factory from cacao_accounting.database import database", "in response.data unidad = Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad is None post = client.post(", "= client.get(\"/accounts/entity/new\") assert b\"Crear Nueva Entidad.\" in response.data entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "post.data assert b\"+506 8667 2108\" in post.data assert b\"<EMAIL>\" in post.data assert b\"+506", "unidad is None post = client.post( \"/accounts/unit/new\", data={ \"id\": \"test\", \"nombre\": \"Test Form\",", "if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\" else: pass @pytest.fixture def client(app): return app.test_client()", "= Unidad.query.filter_by(unidad=\"test\").first() assert unidad is not None assert unidad.entidad == \"cacao\" assert unidad.unidad", "app.app_context(): database.drop_all() database.create_all() base_data() dev_data() app.app_context().push() yield app @pytest.fixture def elimina_variable_entorno(app): import os", "Form\").first() assert entidad is not None assert entidad.moneda == \"NIO\" assert entidad.entidad ==", "\"web\": \"candy.org\", }, follow_redirects=True, ) assert b\"<NAME>\" in post.data assert b\"J08100000078\" in post.data", "login(self): return self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"}) def logout(self): return self._client.get(\"/salir\") @pytest.fixture def", "b\"Dulces Mundo Sabor 
Sociedad Anonima\" in post.data assert b\"+506 8771 0980\" in post.data", "- <NAME> # pylint: disable=redefined-outer-name import pytest from cacao_accounting import create_app as app_factory", "b\"+506 8667 2108\" in post.data assert b\"<EMAIL>\" in post.data assert b\"+506 7242 2789\"", "not use this file except in compliance with the License. # You may", "auth.login() response = client.get(\"/accounts/entity/new\") assert b\"Crear Nueva Entidad.\" in response.data entidad = Entidad.query.filter_by(entidad=\"Test", "\"razon_social\": \"Test Form\", \"id_fiscal\": \"Test Form\", \"id\": \"Test Form\", \"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\",", "assert unidad is None post = client.post( \"/accounts/unit/new\", data={ \"id\": \"test\", \"nombre\": \"Test", "in response.data entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is None post = client.post(", "License, Version 2.0 (the \"License\"); # you may not use this file except", "database.create_all() base_data() dev_data() app.app_context().push() yield app @pytest.fixture def elimina_variable_entorno(app): import os if os.environ.get(\"CACAO_TEST\"):", "assert b\"Crear Nueva Unidad de Negocios.\" in response.data unidad = Unidad.query.filter_by(unidad=\"Test Form\").first() assert", "data={ \"nombre_comercial\": \"Test Form\", \"razon_social\": \"Test Form\", \"id_fiscal\": \"Test Form\", \"id\": \"Test Form\",", "assert b\"+506 8771 0980\" in post.data assert b\"+506 8667 2108\" in post.data assert", "\"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True, \"WTF_CSRF_ENABLED\": False, \"DEBUG\": True, \"DESKTOPMODE\": False, } ) with", ") entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is not None assert entidad.moneda ==", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is None post = client.post( \"/accounts/entity/new\", data={ 
\"nombre_comercial\": \"Test", "\"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273", "post.data assert b\"dulce\" in post.data def test_formulario_nueva_unidad(client, auth): from cacao_accounting.database import Unidad auth.login()", "\"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces Mundo Sabor Sociedad Anonima\", \"telefono1\": \"+506 8771", "\"Test Form\", \"razon_social\": \"Test Form\", \"id_fiscal\": \"Test Form\", \"id\": \"Test Form\", \"moneda\": \"NIO\",", "import Entidad auth.login() get = client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\" in get.data post =", "post.data assert b\"candy.org\" in post.data assert b\"dulce\" in post.data def test_formulario_nueva_unidad(client, auth): from", "b\"Editar Entidad.\" in get.data post = client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\",", "\"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505 8661 2108\", \"fax\": \"+505", "is None post = client.post( \"/accounts/unit/new\", data={ \"id\": \"test\", \"nombre\": \"Test Form\", \"entidad\":", "# you may not use this file except in compliance with the License.", "0980\" in post.data assert b\"+506 8667 2108\" in post.data assert b\"<EMAIL>\" in post.data", "\"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505 8661", "agreed to in writing, software # distributed under the License is distributed on", "\"TESTING\": True, \"WTF_CSRF_ENABLED\": False, \"DEBUG\": True, \"DESKTOPMODE\": False, } ) with app.app_context(): database.drop_all()", "import Unidad auth.login() response = client.get(\"/accounts/unit/new\") assert b\"Crear Nueva Unidad de Negocios.\" in", "}, ) unidad = Unidad.query.filter_by(unidad=\"test\").first() 
assert unidad is not None assert unidad.entidad ==", "unidad = Unidad.query.filter_by(unidad=\"test\").first() assert unidad is not None assert unidad.entidad == \"cacao\" assert", "(the \"License\"); # you may not use this file except in compliance with", "Sociedad Anonima\", \"telefono1\": \"+506 8771 0980\", \"telefono2\": \"+506 8667 2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\":", "# Contributors: # - <NAME> # pylint: disable=redefined-outer-name import pytest from cacao_accounting import", "self._client = client def login(self): return self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"}) def logout(self):", "client.get(\"/accounts/unit/new\") assert b\"Crear Nueva Unidad de Negocios.\" in response.data unidad = Unidad.query.filter_by(unidad=\"Test Form\").first()", "# Unless required by applicable law or agreed to in writing, software #", "by applicable law or agreed to in writing, software # distributed under the", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "\"Test Form\", \"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\":", "self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"}) def logout(self): return self._client.get(\"/salir\") @pytest.fixture def auth(client): return", "\"<NAME>\", \"razon_social\": \"Dulces Mundo Sabor Sociedad Anonima\", \"telefono1\": \"+506 8771 0980\", \"telefono2\": \"+506", "Unidad de Negocios.\" in response.data unidad = Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad is None", "in post.data def test_formulario_nueva_unidad(client, auth): from cacao_accounting.database import Unidad auth.login() response = client.get(\"/accounts/unit/new\")", "2273 0754\", }, ) unidad = Unidad.query.filter_by(unidad=\"test\").first() assert unidad is not None assert", "file except in compliance with the License. 
# You may obtain a copy", "def test_formulario_nueva_unidad(client, auth): from cacao_accounting.database import Unidad auth.login() response = client.get(\"/accounts/unit/new\") assert b\"Crear", "True, \"WTF_CSRF_ENABLED\": False, \"DEBUG\": True, \"DESKTOPMODE\": False, } ) with app.app_context(): database.drop_all() database.create_all()", "Anonima\", \"telefono1\": \"+506 8771 0980\", \"telefono2\": \"+506 8667 2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506", "from cacao_accounting.config import SQLITE app = app_factory( { \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\":", "\"WTF_CSRF_ENABLED\": False, \"DEBUG\": True, \"DESKTOPMODE\": False, } ) with app.app_context(): database.drop_all() database.create_all() base_data()", "in post.data assert b\"J08100000078\" in post.data assert b\"Dulces Mundo Sabor Sociedad Anonima\" in", "\"id_fiscal\": \"Test Form\", \"id\": \"Test Form\", \"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\":", "yield app @pytest.fixture def elimina_variable_entorno(app): import os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\"", "License for the specific language governing permissions and # limitations under the License.", "assert b\"candy.org\" in post.data assert b\"dulce\" in post.data def test_formulario_nueva_unidad(client, auth): from cacao_accounting.database", "assert b\"J08100000078\" in post.data assert b\"Dulces Mundo Sabor Sociedad Anonima\" in post.data assert", "to in writing, software # distributed under the License is distributed on an", "cacao_accounting import create_app as app_factory from cacao_accounting.database import database from cacao_accounting.datos import base_data,", "8771 0980\", \"telefono2\": \"+506 8667 2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506 7242 2789\", \"web\":", "\"telefono1\": \"+505 8771 0980\", 
\"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\", },", "implied. # See the License for the specific language governing permissions and #", "\"License\"); # you may not use this file except in compliance with the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "# # Contributors: # - <NAME> # pylint: disable=redefined-outer-name import pytest from cacao_accounting", "8661 2108\", \"fax\": \"+505 2273 0754\", }, ) unidad = Unidad.query.filter_by(unidad=\"test\").first() assert unidad", "logout(self): return self._client.get(\"/salir\") @pytest.fixture def auth(client): return AuthActions(client) def test_formulario_nueva_entidad(client, auth): from cacao_accounting.database", "class AuthActions: def __init__(self, client): self._client = client def login(self): return self._client.post(\"/login\", data={\"usuario\":", "app = app_factory( { \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True, \"WTF_CSRF_ENABLED\":", "and # limitations under the License. # # Contributors: # - <NAME> #", "False, \"DEBUG\": True, \"DESKTOPMODE\": False, } ) with app.app_context(): database.drop_all() database.create_all() base_data() dev_data()", "response = client.get(\"/accounts/unit/new\") assert b\"Crear Nueva Unidad de Negocios.\" in response.data unidad =", "data={ \"id\": \"test\", \"nombre\": \"Test Form\", \"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\":", "or implied. # See the License for the specific language governing permissions and", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "dev_data() app.app_context().push() yield app @pytest.fixture def elimina_variable_entorno(app): import os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"]", "assert entidad.moneda == \"NIO\" assert entidad.entidad == \"Test Form\" def test_formulario_editar_entidad(client, auth): from", "client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\" in get.data post = client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\",", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "auth(client): return AuthActions(client) def test_formulario_nueva_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() response =", "not None assert entidad.moneda == \"NIO\" assert entidad.entidad == \"Test Form\" def test_formulario_editar_entidad(client,", "auth.login() response = client.get(\"/accounts/unit/new\") assert b\"Crear Nueva Unidad de Negocios.\" in response.data unidad", "response = client.get(\"/accounts/entity/new\") assert b\"Crear Nueva Entidad.\" in response.data entidad = Entidad.query.filter_by(entidad=\"Test Form\").first()", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "pass @pytest.fixture def client(app): return app.test_client() @pytest.fixture def runner(app): return app.test_cli_runner() class AuthActions:", "auth.login() get = client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\" in get.data post = client.post( \"/accounts/entity/edit/dulce\",", "2020 <NAME> # # Licensed under the Apache License, Version 2.0 (the \"License\");", "\"Test Form\" def 
test_formulario_editar_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() get = client.get(\"/accounts/entity/edit/dulce\")", "\"telefono2\": \"+506 8667 2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506 7242 2789\", \"web\": \"candy.org\", },", "import Entidad auth.login() response = client.get(\"/accounts/entity/new\") assert b\"Crear Nueva Entidad.\" in response.data entidad", "\"+506 8667 2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506 7242 2789\", \"web\": \"candy.org\", }, follow_redirects=True,", "post.data assert b\"J08100000078\" in post.data assert b\"Dulces Mundo Sabor Sociedad Anonima\" in post.data", "def elimina_variable_entorno(app): import os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\" else: pass @pytest.fixture", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "Sabor Sociedad Anonima\", \"telefono1\": \"+506 8771 0980\", \"telefono2\": \"+506 8667 2108\", \"correo_electronico\": \"<EMAIL>\",", "Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is not None assert entidad.moneda == \"NIO\" assert entidad.entidad", ") assert b\"<NAME>\" in post.data assert b\"J08100000078\" in post.data assert b\"Dulces Mundo Sabor", "from cacao_accounting import create_app as app_factory from cacao_accounting.database import database from cacao_accounting.datos import", "cacao_accounting.datos import base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True) def app(): from cacao_accounting.config import SQLITE app", "@pytest.fixture(scope=\"module\", autouse=True) def app(): from cacao_accounting.config import SQLITE app = app_factory( { \"SECRET_KEY\":", "entidad is None post = client.post( \"/accounts/entity/new\", data={ \"nombre_comercial\": \"Test Form\", \"razon_social\": \"Test", "8771 0980\" in post.data assert b\"+506 
8667 2108\" in post.data assert b\"<EMAIL>\" in", "False, \"TESTING\": True, \"WTF_CSRF_ENABLED\": False, \"DEBUG\": True, \"DESKTOPMODE\": False, } ) with app.app_context():", "= client def login(self): return self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"}) def logout(self): return", "use this file except in compliance with the License. # You may obtain", "2789\" in post.data assert b\"candy.org\" in post.data assert b\"dulce\" in post.data def test_formulario_nueva_unidad(client,", "in post.data assert b\"+506 8771 0980\" in post.data assert b\"+506 8667 2108\" in", "Anonima\" in post.data assert b\"+506 8771 0980\" in post.data assert b\"+506 8667 2108\"", "Form\" def test_formulario_editar_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() get = client.get(\"/accounts/entity/edit/dulce\") assert", "dev_data @pytest.fixture(scope=\"module\", autouse=True) def app(): from cacao_accounting.config import SQLITE app = app_factory( {", "else: pass @pytest.fixture def client(app): return app.test_client() @pytest.fixture def runner(app): return app.test_cli_runner() class", "import base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True) def app(): from cacao_accounting.config import SQLITE app =", "specific language governing permissions and # limitations under the License. 
# # Contributors:", "\"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505 8661", "test_formulario_nueva_unidad(client, auth): from cacao_accounting.database import Unidad auth.login() response = client.get(\"/accounts/unit/new\") assert b\"Crear Nueva", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "\"acceso\": \"cacao\"}) def logout(self): return self._client.get(\"/salir\") @pytest.fixture def auth(client): return AuthActions(client) def test_formulario_nueva_entidad(client,", "follow_redirects=True, ) assert b\"<NAME>\" in post.data assert b\"J08100000078\" in post.data assert b\"Dulces Mundo", "0754\", }, follow_redirects=True, ) entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is not None", "app @pytest.fixture def elimina_variable_entorno(app): import os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\" else:", "2.0 (the \"License\"); # you may not use this file except in compliance", "post.data assert b\"Dulces Mundo Sabor Sociedad Anonima\" in post.data assert b\"+506 8771 0980\"", "\"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True, \"WTF_CSRF_ENABLED\": False, \"DEBUG\": True, \"DESKTOPMODE\": False, } )", "Unidad auth.login() response = client.get(\"/accounts/unit/new\") assert b\"Crear Nueva Unidad de Negocios.\" in response.data", "def logout(self): return self._client.get(\"/salir\") @pytest.fixture def auth(client): return AuthActions(client) def test_formulario_nueva_entidad(client, auth): from", "<NAME> # pylint: disable=redefined-outer-name import pytest from cacao_accounting import create_app as app_factory from", "for the specific language governing permissions and # limitations under the License. 
#", "return AuthActions(client) def test_formulario_nueva_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() response = client.get(\"/accounts/entity/new\")", "cacao_accounting.database import Entidad auth.login() response = client.get(\"/accounts/entity/new\") assert b\"Crear Nueva Entidad.\" in response.data", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "\"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505 8661 2108\",", "as app_factory from cacao_accounting.database import database from cacao_accounting.datos import base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True)", "# pylint: disable=redefined-outer-name import pytest from cacao_accounting import create_app as app_factory from cacao_accounting.database", "# # Unless required by applicable law or agreed to in writing, software", "} ) with app.app_context(): database.drop_all() database.create_all() base_data() dev_data() app.app_context().push() yield app @pytest.fixture def", "\"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True, \"WTF_CSRF_ENABLED\": False, \"DEBUG\": True, \"DESKTOPMODE\": False, }", "runner(app): return app.test_cli_runner() class AuthActions: def __init__(self, client): self._client = client def login(self):", "express or implied. 
# See the License for the specific language governing permissions", "== \"Test Form\" def test_formulario_editar_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() get =", "response.data entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is None post = client.post( \"/accounts/entity/new\",", "False, } ) with app.app_context(): database.drop_all() database.create_all() base_data() dev_data() app.app_context().push() yield app @pytest.fixture", "\"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\":", "2789\", \"web\": \"candy.org\", }, follow_redirects=True, ) assert b\"<NAME>\" in post.data assert b\"J08100000078\" in", "app.app_context().push() yield app @pytest.fixture def elimina_variable_entorno(app): import os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] =", "self._client.get(\"/salir\") @pytest.fixture def auth(client): return AuthActions(client) def test_formulario_nueva_entidad(client, auth): from cacao_accounting.database import Entidad", "\"<EMAIL>\", \"fax\": \"+506 7242 2789\", \"web\": \"candy.org\", }, follow_redirects=True, ) assert b\"<NAME>\" in", "elimina_variable_entorno(app): import os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\" else: pass @pytest.fixture def", "either express or implied. 
# See the License for the specific language governing", "cacao_accounting.database import Entidad auth.login() get = client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\" in get.data post", "from cacao_accounting.database import Entidad auth.login() response = client.get(\"/accounts/entity/new\") assert b\"Crear Nueva Entidad.\" in", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "app.test_client() @pytest.fixture def runner(app): return app.test_cli_runner() class AuthActions: def __init__(self, client): self._client =", "Copyright 2020 <NAME> # # Licensed under the Apache License, Version 2.0 (the", "assert b\"Dulces Mundo Sabor Sociedad Anonima\" in post.data assert b\"+506 8771 0980\" in", "\"+506 8771 0980\", \"telefono2\": \"+506 8667 2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506 7242 2789\",", "assert entidad.entidad == \"Test Form\" def test_formulario_editar_entidad(client, auth): from cacao_accounting.database import Entidad auth.login()", "\"razon_social\": \"Dulces Mundo Sabor Sociedad Anonima\", \"telefono1\": \"+506 8771 0980\", \"telefono2\": \"+506 8667", "True, \"DESKTOPMODE\": False, } ) with app.app_context(): database.drop_all() database.create_all() base_data() dev_data() app.app_context().push() yield", "post.data assert b\"+506 7242 2789\" in post.data assert b\"candy.org\" in post.data assert b\"dulce\"", "2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506 7242 2789\", \"web\": \"candy.org\", }, follow_redirects=True, ) assert", "2273 0754\", }, follow_redirects=True, ) entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is not", "get = client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\" in get.data post = client.post( \"/accounts/entity/edit/dulce\", data={", "\"+505 2273 0754\", }, follow_redirects=True, ) entidad = 
Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is", "entidad.moneda == \"NIO\" assert entidad.entidad == \"Test Form\" def test_formulario_editar_entidad(client, auth): from cacao_accounting.database", "the License. # You may obtain a copy of the License at #", "def test_formulario_editar_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() get = client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar", "in get.data post = client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "= client.post( \"/accounts/entity/new\", data={ \"nombre_comercial\": \"Test Form\", \"razon_social\": \"Test Form\", \"id_fiscal\": \"Test Form\",", "return self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"}) def logout(self): return self._client.get(\"/salir\") @pytest.fixture def auth(client):", "in post.data assert b\"<EMAIL>\" in post.data assert b\"+506 7242 2789\" in post.data assert", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\" else: pass @pytest.fixture def client(app): return app.test_client() @pytest.fixture def", "\"+505 8771 0980\", \"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\", }, follow_redirects=True,", "post = client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces Mundo Sabor", "Negocios.\" in response.data unidad = Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad is None post =", "\"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True, \"WTF_CSRF_ENABLED\": False, \"DEBUG\": True, \"DESKTOPMODE\": False,", "= \"production\" else: pass 
@pytest.fixture def client(app): return app.test_client() @pytest.fixture def runner(app): return", "\"+505 2273 0754\", }, ) unidad = Unidad.query.filter_by(unidad=\"test\").first() assert unidad is not None", "permissions and # limitations under the License. # # Contributors: # - <NAME>", "app(): from cacao_accounting.config import SQLITE app = app_factory( { \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\",", "Form\", \"razon_social\": \"Test Form\", \"id_fiscal\": \"Test Form\", \"id\": \"Test Form\", \"moneda\": \"NIO\", \"tipo_entidad\":", "Entidad.\" in response.data entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is None post =", "return self._client.get(\"/salir\") @pytest.fixture def auth(client): return AuthActions(client) def test_formulario_nueva_entidad(client, auth): from cacao_accounting.database import", "\"/accounts/unit/new\", data={ \"id\": \"test\", \"nombre\": \"Test Form\", \"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\",", "b\"<EMAIL>\" in post.data assert b\"+506 7242 2789\" in post.data assert b\"candy.org\" in post.data", "assert b\"+506 8667 2108\" in post.data assert b\"<EMAIL>\" in post.data assert b\"+506 7242", "7242 2789\" in post.data assert b\"candy.org\" in post.data assert b\"dulce\" in post.data def", "Form\").first() assert unidad is None post = client.post( \"/accounts/unit/new\", data={ \"id\": \"test\", \"nombre\":", "b\"candy.org\" in post.data assert b\"dulce\" in post.data def test_formulario_nueva_unidad(client, auth): from cacao_accounting.database import", "}, follow_redirects=True, ) assert b\"<NAME>\" in post.data assert b\"J08100000078\" in post.data assert b\"Dulces", "with the License. 
# You may obtain a copy of the License at", "b\"J08100000078\" in post.data assert b\"Dulces Mundo Sabor Sociedad Anonima\" in post.data assert b\"+506", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "Entidad auth.login() get = client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\" in get.data post = client.post(", "import pytest from cacao_accounting import create_app as app_factory from cacao_accounting.database import database from", "\"id\": \"Test Form\", \"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505", "# Copyright 2020 <NAME> # # Licensed under the Apache License, Version 2.0", "app.test_cli_runner() class AuthActions: def __init__(self, client): self._client = client def login(self): return self._client.post(\"/login\",", "b\"Crear Nueva Entidad.\" in response.data entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is None", "\"production\" else: pass @pytest.fixture def client(app): return app.test_client() @pytest.fixture def runner(app): return app.test_cli_runner()", "assert b\"Crear Nueva Entidad.\" in response.data entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is", "entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is not None assert entidad.moneda == \"NIO\"", "in post.data assert b\"candy.org\" in post.data assert b\"dulce\" in post.data def test_formulario_nueva_unidad(client, auth):", "auth): from cacao_accounting.database import Entidad auth.login() get = client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\" in", "__init__(self, client): self._client = client def login(self): return self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"})", "law or agreed to in writing, software # distributed under the License is", "\"nombre_comercial\": \"Test Form\", 
\"razon_social\": \"Test Form\", \"id_fiscal\": \"Test Form\", \"id\": \"Test Form\", \"moneda\":", "the License for the specific language governing permissions and # limitations under the", "\"+506 7242 2789\", \"web\": \"candy.org\", }, follow_redirects=True, ) assert b\"<NAME>\" in post.data assert", "from cacao_accounting.datos import base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True) def app(): from cacao_accounting.config import SQLITE", "response.data unidad = Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad is None post = client.post( \"/accounts/unit/new\",", "auth): from cacao_accounting.database import Unidad auth.login() response = client.get(\"/accounts/unit/new\") assert b\"Crear Nueva Unidad", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "None post = client.post( \"/accounts/entity/new\", data={ \"nombre_comercial\": \"Test Form\", \"razon_social\": \"Test Form\", \"id_fiscal\":", "pytest from cacao_accounting import create_app as app_factory from cacao_accounting.database import database from cacao_accounting.datos", "AuthActions(client) def test_formulario_nueva_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() response = client.get(\"/accounts/entity/new\") assert", "def auth(client): return AuthActions(client) def test_formulario_nueva_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() response", "2108\", \"fax\": \"+505 2273 0754\", }, follow_redirects=True, ) entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert", "in compliance with the License. 
# You may obtain a copy of the", "database from cacao_accounting.datos import base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True) def app(): from cacao_accounting.config import", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "test_formulario_editar_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() get = client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\"", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "2108\", \"fax\": \"+505 2273 0754\", }, ) unidad = Unidad.query.filter_by(unidad=\"test\").first() assert unidad is", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "@pytest.fixture def elimina_variable_entorno(app): import os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\" else: pass", "return app.test_cli_runner() class AuthActions: def __init__(self, client): self._client = client def login(self): return", "b\"+506 8771 0980\" in post.data assert b\"+506 8667 2108\" in post.data assert b\"<EMAIL>\"", "cacao_accounting.database import database from cacao_accounting.datos import base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True) def app(): from", "post = client.post( \"/accounts/entity/new\", data={ \"nombre_comercial\": \"Test Form\", \"razon_social\": \"Test Form\", \"id_fiscal\": \"Test", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "\"cacao\", \"acceso\": \"cacao\"}) def logout(self): return self._client.get(\"/salir\") @pytest.fixture def auth(client): return AuthActions(client) def", "app_factory( { \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True, \"WTF_CSRF_ENABLED\": 
False, \"DEBUG\":", "from cacao_accounting.database import Entidad auth.login() get = client.get(\"/accounts/entity/edit/dulce\") assert b\"Editar Entidad.\" in get.data", "is not None assert entidad.moneda == \"NIO\" assert entidad.entidad == \"Test Form\" def", "limitations under the License. # # Contributors: # - <NAME> # pylint: disable=redefined-outer-name", "\"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505 8661 2108\", \"fax\": \"+505 2273 0754\",", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "\"cacao\"}) def logout(self): return self._client.get(\"/salir\") @pytest.fixture def auth(client): return AuthActions(client) def test_formulario_nueva_entidad(client, auth):", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "post.data assert b\"+506 8771 0980\" in post.data assert b\"+506 8667 2108\" in post.data", "Sociedad Anonima\" in post.data assert b\"+506 8771 0980\" in post.data assert b\"+506 8667", "SQLITE app = app_factory( { \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True,", "\"test\", \"nombre\": \"Test Form\", \"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771", "create_app as app_factory from cacao_accounting.database import database from cacao_accounting.datos import base_data, dev_data @pytest.fixture(scope=\"module\",", "base_data() dev_data() app.app_context().push() yield app @pytest.fixture def elimina_variable_entorno(app): import os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\")", "\"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506 7242 2789\", \"web\": \"candy.org\", }, follow_redirects=True, ) assert b\"<NAME>\"", "= Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad is None post = client.post( \"/accounts/unit/new\", data={ 
\"id\":", "# limitations under the License. # # Contributors: # - <NAME> # pylint:", "Version 2.0 (the \"License\"); # you may not use this file except in", "assert b\"<NAME>\" in post.data assert b\"J08100000078\" in post.data assert b\"Dulces Mundo Sabor Sociedad", "except in compliance with the License. # You may obtain a copy of", "app.config[\"ENV\"] = \"production\" else: pass @pytest.fixture def client(app): return app.test_client() @pytest.fixture def runner(app):", "}, follow_redirects=True, ) entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is not None assert", "os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\" else: pass @pytest.fixture def client(app): return", "import SQLITE app = app_factory( { \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\":", "client.post( \"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces Mundo Sabor Sociedad Anonima\",", "client def login(self): return self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"}) def logout(self): return self._client.get(\"/salir\")", "def runner(app): return app.test_cli_runner() class AuthActions: def __init__(self, client): self._client = client def", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. 
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "database.drop_all() database.create_all() base_data() dev_data() app.app_context().push() yield app @pytest.fixture def elimina_variable_entorno(app): import os if", "\"/accounts/entity/edit/dulce\", data={ \"id_fiscal\": \"J08100000078\", \"nombre_comercial\": \"<NAME>\", \"razon_social\": \"Dulces Mundo Sabor Sociedad Anonima\", \"telefono1\":", "post.data def test_formulario_nueva_unidad(client, auth): from cacao_accounting.database import Unidad auth.login() response = client.get(\"/accounts/unit/new\") assert", "Entidad auth.login() response = client.get(\"/accounts/entity/new\") assert b\"Crear Nueva Entidad.\" in response.data entidad =", "in post.data assert b\"dulce\" in post.data def test_formulario_nueva_unidad(client, auth): from cacao_accounting.database import Unidad", "def test_formulario_nueva_entidad(client, auth): from cacao_accounting.database import Entidad auth.login() response = client.get(\"/accounts/entity/new\") assert b\"Crear", "base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True) def app(): from cacao_accounting.config import SQLITE app = app_factory(", "unidad = Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad is None post = client.post( \"/accounts/unit/new\", data={", "Mundo Sabor Sociedad Anonima\" in post.data assert b\"+506 8771 0980\" in post.data assert", "\"candy.org\", }, follow_redirects=True, ) assert b\"<NAME>\" in post.data assert b\"J08100000078\" in post.data assert", "= client.post( \"/accounts/unit/new\", data={ \"id\": \"test\", \"nombre\": \"Test Form\", \"entidad\": \"cacao\", \"correo_electronico\": \"<EMAIL>\",", "\"/accounts/entity/new\", data={ \"nombre_comercial\": \"Test Form\", \"razon_social\": \"Test Form\", \"id_fiscal\": \"Test Form\", \"id\": \"Test", "Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad is None post = client.post( \"/accounts/unit/new\", data={ 
\"id\": \"test\",", "assert b\"dulce\" in post.data def test_formulario_nueva_unidad(client, auth): from cacao_accounting.database import Unidad auth.login() response", "def app(): from cacao_accounting.config import SQLITE app = app_factory( { \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\":", "\"Test Form\", \"id\": \"Test Form\", \"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\",", "with app.app_context(): database.drop_all() database.create_all() base_data() dev_data() app.app_context().push() yield app @pytest.fixture def elimina_variable_entorno(app): import", "follow_redirects=True, ) entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad is not None assert entidad.moneda", "import database from cacao_accounting.datos import base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True) def app(): from cacao_accounting.config", "the specific language governing permissions and # limitations under the License. 
# #", "0980\", \"telefono2\": \"+506 8667 2108\", \"correo_electronico\": \"<EMAIL>\", \"fax\": \"+506 7242 2789\", \"web\": \"candy.org\",", "@pytest.fixture def client(app): return app.test_client() @pytest.fixture def runner(app): return app.test_cli_runner() class AuthActions: def", "Form\", \"id_fiscal\": \"Test Form\", \"id\": \"Test Form\", \"moneda\": \"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\",", "\"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": \"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True, \"WTF_CSRF_ENABLED\": False, \"DEBUG\": True, \"DESKTOPMODE\":", "def login(self): return self._client.post(\"/login\", data={\"usuario\": \"cacao\", \"acceso\": \"cacao\"}) def logout(self): return self._client.get(\"/salir\") @pytest.fixture", "b\"+506 7242 2789\" in post.data assert b\"candy.org\" in post.data assert b\"dulce\" in post.data", "client.get(\"/accounts/entity/new\") assert b\"Crear Nueva Entidad.\" in response.data entidad = Entidad.query.filter_by(entidad=\"Test Form\").first() assert entidad", "\"DEBUG\": True, \"DESKTOPMODE\": False, } ) with app.app_context(): database.drop_all() database.create_all() base_data() dev_data() app.app_context().push()", "# - <NAME> # pylint: disable=redefined-outer-name import pytest from cacao_accounting import create_app as", "Form\").first() assert entidad is None post = client.post( \"/accounts/entity/new\", data={ \"nombre_comercial\": \"Test Form\",", "\"NIO\", \"tipo_entidad\": \"Asociación\", \"correo_electronico\": \"<EMAIL>\", \"web\": \"https://cacao.io\", \"telefono1\": \"+505 8771 0980\", \"telefono2\": \"+505", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "@pytest.fixture def auth(client): return AuthActions(client) def test_formulario_nueva_entidad(client, auth): from cacao_accounting.database import Entidad auth.login()", "= app_factory( { \"SECRET_KEY\": \"<KEY>\", \"SQLALCHEMY_DATABASE_URI\": 
\"sqlite://\", \"SQLALCHEMY_TRACK_MODIFICATIONS\": False, \"TESTING\": True, \"WTF_CSRF_ENABLED\": False,", "app_factory from cacao_accounting.database import database from cacao_accounting.datos import base_data, dev_data @pytest.fixture(scope=\"module\", autouse=True) def", "client(app): return app.test_client() @pytest.fixture def runner(app): return app.test_cli_runner() class AuthActions: def __init__(self, client):", "def client(app): return app.test_client() @pytest.fixture def runner(app): return app.test_cli_runner() class AuthActions: def __init__(self,", "== \"NIO\" assert entidad.entidad == \"Test Form\" def test_formulario_editar_entidad(client, auth): from cacao_accounting.database import", "import os if os.environ.get(\"CACAO_TEST\"): os.environ.pop(\"CACAO_TEST\") app.config[\"ENV\"] = \"production\" else: pass @pytest.fixture def client(app):", "\"NIO\" assert entidad.entidad == \"Test Form\" def test_formulario_editar_entidad(client, auth): from cacao_accounting.database import Entidad", "de Negocios.\" in response.data unidad = Unidad.query.filter_by(unidad=\"Test Form\").first() assert unidad is None post" ]
[ "0.5 * dims, delta - dims, delta) return np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([ 1,", "6, 7, 4, 6, 6, 6, 5, 6, 7, 7, 5, 7, 6,", "2, 3, 4, 3, 3, 4, 3, 4, 4, 4, 4, 3, 4,", "2, 2, 2, 3, 4, 3, 3, 4, 3, 4, 4, 4, 4,", "4, 4, 4, 5, 7, 4, 6, 6, 7, 4, 6, 6, 6,", "5, 5, 6, 8, 7, 6, 6, 8, 6, 9, 5, 6, 4,", "5, 6, 4, 6, 6, 7, 8, 6, 6, 8, 7, 6, 7,", "dims=kwargs[\"dims\"]) else: BT = BallTree(X, metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT, r_cut, n_jobs=1, mode='connectivity') A", "6, 7, 8, 6, 6, 8, 7, 6, 7, 7, 8, 5, 6,", "7, 5, 7, 6, 7, 6, 5, 5, 6, 8, 7, 6, 6,", "+ \"../../../orca/orca.exe\") def pbc(x0, x1, dims): delta = np.abs(x0 - x1) delta =", "BallTree, radius_neighbors_graph import networkx as nx __all__ = [\"ORCA_PATH\", \"pbc\", \"orbits\", \"weights\", \"compute_graph\"]", "return np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([ 1, 2, 2, 2, 3, 4, 3, 3,", "3, 4, 6, 5, 4, 5, 6, 6, 4, 4, 4, 5, 7,", "delta = np.abs(x0 - x1) delta = np.where(delta > 0.5 * dims, delta", "6, 7, 7, 8, 5, 6, 6, 4 ], dtype=np.float) weights = 1.", "7, 6, 7, 6, 5, 5, 6, 8, 7, 6, 6, 8, 6,", "networkx as nx __all__ = [\"ORCA_PATH\", \"pbc\", \"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__)", "6, 6, 4 ], dtype=np.float) weights = 1. - np.log(orbits) / np.log(73.) def", "5, 6, 6, 4 ], dtype=np.float) weights = 1. - np.log(orbits) / np.log(73.)", "dtype=np.float) weights = 1. - np.log(orbits) / np.log(73.) def compute_graph(X, r_cut, **kwargs): if", "9, 5, 6, 4, 6, 6, 7, 8, 6, 6, 8, 7, 6,", "np from sklearn.neighbors import BallTree, radius_neighbors_graph import networkx as nx __all__ = [\"ORCA_PATH\",", "= BallTree(X, metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT, r_cut, n_jobs=1, mode='connectivity') A = np.matrix(rng_con.toarray()) G", "dims, delta - dims, delta) return np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([ 1, 2, 2,", "8, 5, 6, 6, 4 ], dtype=np.float) weights = 1. 
- np.log(orbits) /", "1, 2, 2, 2, 3, 4, 3, 3, 4, 3, 4, 4, 4,", "radius_neighbors_graph import networkx as nx __all__ = [\"ORCA_PATH\", \"pbc\", \"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH", "= radius_neighbors_graph(BT, r_cut, n_jobs=1, mode='connectivity') A = np.matrix(rng_con.toarray()) G = nx.from_numpy_matrix(A) return G", "metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT = BallTree(X, metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT, r_cut, n_jobs=1, mode='connectivity')", "6, 7, 7, 5, 7, 6, 7, 6, 5, 5, 6, 8, 7,", "6, 5, 4, 5, 6, 6, 4, 4, 4, 5, 7, 4, 6,", "compute_graph(X, r_cut, **kwargs): if kwargs[\"dims\"] is not None: BT = BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"])", "nx __all__ = [\"ORCA_PATH\", \"pbc\", \"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\")", "pbc(x0, x1, dims): delta = np.abs(x0 - x1) delta = np.where(delta > 0.5", "= np.abs(x0 - x1) delta = np.where(delta > 0.5 * dims, delta -", "np.array([ 1, 2, 2, 2, 3, 4, 3, 3, 4, 3, 4, 4,", "7, 4, 6, 6, 7, 4, 6, 6, 6, 5, 6, 7, 7,", "6, 4, 4, 4, 5, 7, 4, 6, 6, 7, 4, 6, 6,", "__all__ = [\"ORCA_PATH\", \"pbc\", \"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def", "def pbc(x0, x1, dims): delta = np.abs(x0 - x1) delta = np.where(delta >", "numpy as np from sklearn.neighbors import BallTree, radius_neighbors_graph import networkx as nx __all__", "3, 3, 4, 3, 4, 4, 4, 4, 3, 4, 6, 5, 4,", "4, 5, 6, 6, 4, 4, 4, 5, 7, 4, 6, 6, 7,", "8, 7, 6, 6, 8, 6, 9, 5, 6, 4, 6, 6, 7,", "3, 4, 4, 4, 4, 3, 4, 6, 5, 4, 5, 6, 6,", "dims, delta) return np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([ 1, 2, 2, 2, 3, 4,", "BallTree(X, metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT, r_cut, n_jobs=1, mode='connectivity') A = np.matrix(rng_con.toarray()) G =", "as nx __all__ = [\"ORCA_PATH\", \"pbc\", 
\"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__) +", "3, 4, 3, 3, 4, 3, 4, 4, 4, 4, 3, 4, 6,", "import os import numpy as np from sklearn.neighbors import BallTree, radius_neighbors_graph import networkx", "6, 8, 7, 6, 7, 7, 8, 5, 6, 6, 4 ], dtype=np.float)", "4, 4, 4, 3, 4, 6, 5, 4, 5, 6, 6, 4, 4,", "1. - np.log(orbits) / np.log(73.) def compute_graph(X, r_cut, **kwargs): if kwargs[\"dims\"] is not", "2, 2, 3, 4, 3, 3, 4, 3, 4, 4, 4, 4, 3,", "7, 6, 5, 5, 6, 8, 7, 6, 6, 8, 6, 9, 5,", "rng_con = radius_neighbors_graph(BT, r_cut, n_jobs=1, mode='connectivity') A = np.matrix(rng_con.toarray()) G = nx.from_numpy_matrix(A) return", "7, 8, 5, 6, 6, 4 ], dtype=np.float) weights = 1. - np.log(orbits)", "x1) delta = np.where(delta > 0.5 * dims, delta - dims, delta) return", "6, 6, 8, 6, 9, 5, 6, 4, 6, 6, 7, 8, 6,", "3, 4, 3, 4, 4, 4, 4, 3, 4, 6, 5, 4, 5,", "BT = BallTree(X, metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT, r_cut, n_jobs=1, mode='connectivity') A = np.matrix(rng_con.toarray())", "6, 6, 7, 8, 6, 6, 8, 7, 6, 7, 7, 8, 5,", "4, 6, 5, 4, 5, 6, 6, 4, 4, 4, 5, 7, 4,", "= np.where(delta > 0.5 * dims, delta - dims, delta) return np.sqrt((delta**2).sum(axis=-1)) orbits", "as np from sklearn.neighbors import BallTree, radius_neighbors_graph import networkx as nx __all__ =", "7, 6, 7, 7, 8, 5, 6, 6, 4 ], dtype=np.float) weights =", "\"../../../orca/orca.exe\") def pbc(x0, x1, dims): delta = np.abs(x0 - x1) delta = np.where(delta", "not None: BT = BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT = BallTree(X, metric=kwargs[\"metric\"]) rng_con", "7, 8, 6, 6, 8, 7, 6, 7, 7, 8, 5, 6, 6,", "5, 4, 5, 6, 6, 4, 4, 4, 5, 7, 4, 6, 6,", "metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT, r_cut, n_jobs=1, mode='connectivity') A = np.matrix(rng_con.toarray()) G = nx.from_numpy_matrix(A)", "\"\"\" util.py Some utility functions \"\"\" import os import numpy as np from", "5, 7, 4, 6, 6, 
7, 4, 6, 6, 6, 5, 6, 7,", "8, 6, 6, 8, 7, 6, 7, 7, 8, 5, 6, 6, 4", "4 ], dtype=np.float) weights = 1. - np.log(orbits) / np.log(73.) def compute_graph(X, r_cut,", "orbits = np.array([ 1, 2, 2, 2, 3, 4, 3, 3, 4, 3,", "os import numpy as np from sklearn.neighbors import BallTree, radius_neighbors_graph import networkx as", "[\"ORCA_PATH\", \"pbc\", \"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def pbc(x0, x1,", "4, 3, 4, 4, 4, 4, 3, 4, 6, 5, 4, 5, 6,", "6, 4, 6, 6, 7, 8, 6, 6, 8, 7, 6, 7, 7,", "ORCA_PATH = os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def pbc(x0, x1, dims): delta = np.abs(x0 -", "4, 6, 6, 7, 4, 6, 6, 6, 5, 6, 7, 7, 5,", "6, 6, 7, 4, 6, 6, 6, 5, 6, 7, 7, 5, 7,", "6, 6, 4, 4, 4, 5, 7, 4, 6, 6, 7, 4, 6,", "sklearn.neighbors import BallTree, radius_neighbors_graph import networkx as nx __all__ = [\"ORCA_PATH\", \"pbc\", \"orbits\",", "\"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def pbc(x0, x1, dims): delta", "4, 5, 7, 4, 6, 6, 7, 4, 6, 6, 6, 5, 6,", "6, 4 ], dtype=np.float) weights = 1. - np.log(orbits) / np.log(73.) def compute_graph(X,", "- np.log(orbits) / np.log(73.) def compute_graph(X, r_cut, **kwargs): if kwargs[\"dims\"] is not None:", "np.abs(x0 - x1) delta = np.where(delta > 0.5 * dims, delta - dims,", "= 1. - np.log(orbits) / np.log(73.) def compute_graph(X, r_cut, **kwargs): if kwargs[\"dims\"] is", "dims): delta = np.abs(x0 - x1) delta = np.where(delta > 0.5 * dims,", "= BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT = BallTree(X, metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT, r_cut,", "None: BT = BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT = BallTree(X, metric=kwargs[\"metric\"]) rng_con =", "/ np.log(73.) 
def compute_graph(X, r_cut, **kwargs): if kwargs[\"dims\"] is not None: BT =", "* dims, delta - dims, delta) return np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([ 1, 2,", "\"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def pbc(x0, x1, dims): delta = np.abs(x0", "4, 4, 5, 7, 4, 6, 6, 7, 4, 6, 6, 6, 5,", "6, 5, 5, 6, 8, 7, 6, 6, 8, 6, 9, 5, 6,", "6, 8, 6, 9, 5, 6, 4, 6, 6, 7, 8, 6, 6,", "import networkx as nx __all__ = [\"ORCA_PATH\", \"pbc\", \"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH =", "= np.array([ 1, 2, 2, 2, 3, 4, 3, 3, 4, 3, 4,", "8, 7, 6, 7, 7, 8, 5, 6, 6, 4 ], dtype=np.float) weights", "= [\"ORCA_PATH\", \"pbc\", \"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def pbc(x0,", "Some utility functions \"\"\" import os import numpy as np from sklearn.neighbors import", "import numpy as np from sklearn.neighbors import BallTree, radius_neighbors_graph import networkx as nx", "4, 3, 3, 4, 3, 4, 4, 4, 4, 3, 4, 6, 5,", "7, 4, 6, 6, 6, 5, 6, 7, 7, 5, 7, 6, 7,", "6, 5, 6, 7, 7, 5, 7, 6, 7, 6, 5, 5, 6,", "utility functions \"\"\" import os import numpy as np from sklearn.neighbors import BallTree,", "4, 6, 6, 7, 8, 6, 6, 8, 7, 6, 7, 7, 8,", "\"\"\" import os import numpy as np from sklearn.neighbors import BallTree, radius_neighbors_graph import", "5, 7, 6, 7, 6, 5, 5, 6, 8, 7, 6, 6, 8,", "4, 3, 4, 6, 5, 4, 5, 6, 6, 4, 4, 4, 5,", "np.log(73.) def compute_graph(X, r_cut, **kwargs): if kwargs[\"dims\"] is not None: BT = BallTree(X,", "4, 6, 6, 6, 5, 6, 7, 7, 5, 7, 6, 7, 6,", "- dims, delta) return np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([ 1, 2, 2, 2, 3,", "**kwargs): if kwargs[\"dims\"] is not None: BT = BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT", "weights = 1. - np.log(orbits) / np.log(73.) 
def compute_graph(X, r_cut, **kwargs): if kwargs[\"dims\"]", "8, 6, 9, 5, 6, 4, 6, 6, 7, 8, 6, 6, 8,", "else: BT = BallTree(X, metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT, r_cut, n_jobs=1, mode='connectivity') A =", "from sklearn.neighbors import BallTree, radius_neighbors_graph import networkx as nx __all__ = [\"ORCA_PATH\", \"pbc\",", "\"pbc\", \"orbits\", \"weights\", \"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def pbc(x0, x1, dims):", "util.py Some utility functions \"\"\" import os import numpy as np from sklearn.neighbors", "delta) return np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([ 1, 2, 2, 2, 3, 4, 3,", "5, 6, 7, 7, 5, 7, 6, 7, 6, 5, 5, 6, 8,", "np.where(delta > 0.5 * dims, delta - dims, delta) return np.sqrt((delta**2).sum(axis=-1)) orbits =", "6, 9, 5, 6, 4, 6, 6, 7, 8, 6, 6, 8, 7,", "is not None: BT = BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT = BallTree(X, metric=kwargs[\"metric\"])", "delta = np.where(delta > 0.5 * dims, delta - dims, delta) return np.sqrt((delta**2).sum(axis=-1))", "7, 7, 8, 5, 6, 6, 4 ], dtype=np.float) weights = 1. -", "delta - dims, delta) return np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([ 1, 2, 2, 2,", "functions \"\"\" import os import numpy as np from sklearn.neighbors import BallTree, radius_neighbors_graph", "7, 6, 6, 8, 6, 9, 5, 6, 4, 6, 6, 7, 8,", "BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT = BallTree(X, metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT, r_cut, n_jobs=1,", "- x1) delta = np.where(delta > 0.5 * dims, delta - dims, delta)", "if kwargs[\"dims\"] is not None: BT = BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT =", "], dtype=np.float) weights = 1. - np.log(orbits) / np.log(73.) 
def compute_graph(X, r_cut, **kwargs):", "os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def pbc(x0, x1, dims): delta = np.abs(x0 - x1) delta", "np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([ 1, 2, 2, 2, 3, 4, 3, 3, 4,", "6, 8, 7, 6, 6, 8, 6, 9, 5, 6, 4, 6, 6,", "kwargs[\"dims\"] is not None: BT = BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT = BallTree(X,", "def compute_graph(X, r_cut, **kwargs): if kwargs[\"dims\"] is not None: BT = BallTree(X, metric=kwargs[\"metric\"],", "6, 6, 8, 7, 6, 7, 7, 8, 5, 6, 6, 4 ],", "\"weights\", \"compute_graph\"] ORCA_PATH = os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def pbc(x0, x1, dims): delta =", "np.log(orbits) / np.log(73.) def compute_graph(X, r_cut, **kwargs): if kwargs[\"dims\"] is not None: BT", "4, 4, 3, 4, 6, 5, 4, 5, 6, 6, 4, 4, 4,", "BT = BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else: BT = BallTree(X, metric=kwargs[\"metric\"]) rng_con = radius_neighbors_graph(BT,", "import BallTree, radius_neighbors_graph import networkx as nx __all__ = [\"ORCA_PATH\", \"pbc\", \"orbits\", \"weights\",", "r_cut, **kwargs): if kwargs[\"dims\"] is not None: BT = BallTree(X, metric=kwargs[\"metric\"], dims=kwargs[\"dims\"]) else:", "5, 6, 8, 7, 6, 6, 8, 6, 9, 5, 6, 4, 6,", "6, 6, 6, 5, 6, 7, 7, 5, 7, 6, 7, 6, 5,", "> 0.5 * dims, delta - dims, delta) return np.sqrt((delta**2).sum(axis=-1)) orbits = np.array([", "6, 7, 6, 5, 5, 6, 8, 7, 6, 6, 8, 6, 9,", "x1, dims): delta = np.abs(x0 - x1) delta = np.where(delta > 0.5 *", "7, 7, 5, 7, 6, 7, 6, 5, 5, 6, 8, 7, 6,", "5, 6, 6, 4, 4, 4, 5, 7, 4, 6, 6, 7, 4,", "4, 4, 4, 4, 3, 4, 6, 5, 4, 5, 6, 6, 4,", "6, 6, 5, 6, 7, 7, 5, 7, 6, 7, 6, 5, 5,", "= os.path.abspath(os.path.abspath(__file__) + \"../../../orca/orca.exe\") def pbc(x0, x1, dims): delta = np.abs(x0 - x1)" ]
[ "[ tvh_genres['NEWS'] ], \"Comedy\": [ tvh_genres['COMEDY'] ], \"Documentary\": [ tvh_genres['DOCUMENTARY'] ], \"Drama\": [", "tvh_genres['DOCUMENTARY'] ], \"Drama\": [ tvh_genres['MOVIE'] ], \"Educational\": [ tvh_genres['EDUCATIONAL'] ], \"Events & Specials\":", "copyright notice and this permission notice shall be included in all copies or", "ustvgo_genres = { \"Action & Adventure\": [ tvh_genres['ADVENTURE'] ], \"Business\": [ tvh_genres['NEWS'] ],", "a copy of this software and associated documentation files (the \"Software\"), to deal", "from lib.tvheadend.epg_category import tvh_genres ustvgo_channels = '<KEY>' ustvgo_png = '<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW='", "Copyright (C) 2021 ROCKY4546 https://github.com/rocky4546 This file is part of Cabernet Permission is", "permission notice shall be included in all copies or substantial portions of the", "& Lifestyle\": [ tvh_genres['FITNESS'] ], \"Horror\": [ tvh_genres['SF'] ], \"Kids\": [ tvh_genres['KIDS'] ],", "], \"None\": None, \"Other\": None, \"Pro Sports\": [ tvh_genres['SPORT'] ], \"Reality\": [ tvh_genres['GAME']", "to permit persons to whom the Software is furnished to do so, subject", "= '<KEY>' ustvgo_png = '<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>' ustvgo_program =", "{ \"Action & Adventure\": [ tvh_genres['ADVENTURE'] ], \"Business\": [ tvh_genres['NEWS'] ], \"Comedy\": [", "ustvgo_groups = { } ustvgo_genres = { \"Action & Adventure\": [ tvh_genres['ADVENTURE'] ],", "the Software. 
\"\"\" from lib.tvheadend.epg_category import groups from lib.tvheadend.epg_category import tvh_genres ustvgo_channels =", "], \"Documentary\": [ tvh_genres['DOCUMENTARY'] ], \"Drama\": [ tvh_genres['MOVIE'] ], \"Educational\": [ tvh_genres['EDUCATIONAL'] ],", "'<KEY>' ustvgo_png = '<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>' ustvgo_program = '<KEY>", "tvh_genres['SF'] ], \"Talk & Interview\": [ tvh_genres['TALK_SHOW'] ], \"Tech & Gaming\": [ tvh_genres['TECHNOLOGY']", "[ tvh_genres['FITNESS'] ], \"Horror\": [ tvh_genres['SF'] ], \"Kids\": [ tvh_genres['KIDS'] ], \"Music\": [", "& Interview\": [ tvh_genres['TALK_SHOW'] ], \"Tech & Gaming\": [ tvh_genres['TECHNOLOGY'] ], \"Travel\": [", "this software and associated documentation files (the \"Software\"), to deal in the Software", "the following conditions: The above copyright notice and this permission notice shall be", "conditions: The above copyright notice and this permission notice shall be included in", "ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>' ustvgo_program = '<KEY> ustvgo_groups = { }", "'<KEY> ustvgo_groups = { } ustvgo_genres = { \"Action & Adventure\": [ tvh_genres['ADVENTURE']", "import tvh_genres ustvgo_channels = '<KEY>' ustvgo_png = '<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg =", "], \"Suspense\": [ tvh_genres['SF'] ], \"Talk & Interview\": [ tvh_genres['TALK_SHOW'] ], \"Tech &", "of the Software. 
\"\"\" from lib.tvheadend.epg_category import groups from lib.tvheadend.epg_category import tvh_genres ustvgo_channels", "[ tvh_genres['GAME'] ], \"Health & Lifestyle\": [ tvh_genres['FITNESS'] ], \"Horror\": [ tvh_genres['SF'] ],", "free of charge, to any person obtaining a copy of this software and", "and this permission notice shall be included in all copies or substantial portions", "tvh_genres['TALK_SHOW'] ], \"Tech & Gaming\": [ tvh_genres['TECHNOLOGY'] ], \"Travel\": [ tvh_genres['TRAVEL'] ], \"Variety", "and to permit persons to whom the Software is furnished to do so,", "None, \"Other\": None, \"Pro Sports\": [ tvh_genres['SPORT'] ], \"Reality\": [ tvh_genres['GAME'] ], \"Science\":", "Show\": [ tvh_genres['GAME'] ], \"Health & Lifestyle\": [ tvh_genres['FITNESS'] ], \"Horror\": [ tvh_genres['SF']", "Permission is hereby granted, free of charge, to any person obtaining a copy", "2021 ROCKY4546 https://github.com/rocky4546 This file is part of Cabernet Permission is hereby granted,", "Lifestyle\": [ tvh_genres['FITNESS'] ], \"Horror\": [ tvh_genres['SF'] ], \"Kids\": [ tvh_genres['KIDS'] ], \"Music\":", "furnished to do so, subject to the following conditions: The above copyright notice", "from lib.tvheadend.epg_category import groups from lib.tvheadend.epg_category import tvh_genres ustvgo_channels = '<KEY>' ustvgo_png =", "[ tvh_genres['COMEDY'] ], \"Documentary\": [ tvh_genres['DOCUMENTARY'] ], \"Drama\": [ tvh_genres['MOVIE'] ], \"Educational\": [", "all copies or substantial portions of the Software. 
\"\"\" from lib.tvheadend.epg_category import groups", "tvh_genres['KIDS'] ], \"Fantasy\": [ tvh_genres['SF'] ], \"Food & Cooking\": [ tvh_genres['COOKING'] ], \"Game", "None, \"Pro Sports\": [ tvh_genres['SPORT'] ], \"Reality\": [ tvh_genres['GAME'] ], \"Science\": [ tvh_genres['SCIENCE']", "pylama:ignore=E203,E221 \"\"\" MIT License Copyright (C) 2021 ROCKY4546 https://github.com/rocky4546 This file is part", "[ tvh_genres['SCIENCE'] ], \"Science Fiction\": [ tvh_genres['SF'] ], \"Sports\": [ tvh_genres['SPORT'] ], \"Suspense\":", "\"Suspense\": [ tvh_genres['SF'] ], \"Talk & Interview\": [ tvh_genres['TALK_SHOW'] ], \"Tech & Gaming\":", "Interview\": [ tvh_genres['TALK_SHOW'] ], \"Tech & Gaming\": [ tvh_genres['TECHNOLOGY'] ], \"Travel\": [ tvh_genres['TRAVEL']", "whom the Software is furnished to do so, subject to the following conditions:", "permit persons to whom the Software is furnished to do so, subject to", "[ tvh_genres['MOVIE'] ], \"Educational\": [ tvh_genres['EDUCATIONAL'] ], \"Events & Specials\": [ tvh_genres['SPORT_SPECIAL'] ],", "rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of", "the Software without restriction, including without limitation the rights to use, copy, modify,", "], \"Horror\": [ tvh_genres['SF'] ], \"Kids\": [ tvh_genres['KIDS'] ], \"Music\": [ tvh_genres['MUSIC'] ],", "any person obtaining a copy of this software and associated documentation files (the", "person obtaining a copy of this software and associated documentation files (the \"Software\"),", "the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies", "tvh_genres['COMEDY'] ], \"Documentary\": [ tvh_genres['DOCUMENTARY'] ], \"Drama\": [ tvh_genres['MOVIE'] ], \"Educational\": [ tvh_genres['EDUCATIONAL']", "], \"Kids\": [ tvh_genres['KIDS'] ], \"Music\": [ tvh_genres['MUSIC'] ], \"None\": None, \"Other\": None,", "'<KEY>' ustvgo_program = '<KEY> ustvgo_groups = { } ustvgo_genres = { \"Action &", "[ 
tvh_genres['TECHNOLOGY'] ], \"Travel\": [ tvh_genres['TRAVEL'] ], \"Variety Shows\": [ tvh_genres['VARIETY'] ] }", "\"Comedy\": [ tvh_genres['COMEDY'] ], \"Documentary\": [ tvh_genres['DOCUMENTARY'] ], \"Drama\": [ tvh_genres['MOVIE'] ], \"Educational\":", "\"Science\": [ tvh_genres['SCIENCE'] ], \"Science Fiction\": [ tvh_genres['SF'] ], \"Sports\": [ tvh_genres['SPORT'] ],", "copies of the Software, and to permit persons to whom the Software is", "\"Kids\": [ tvh_genres['KIDS'] ], \"Music\": [ tvh_genres['MUSIC'] ], \"None\": None, \"Other\": None, \"Pro", "\"Sports\": [ tvh_genres['SPORT'] ], \"Suspense\": [ tvh_genres['SF'] ], \"Talk & Interview\": [ tvh_genres['TALK_SHOW']", "(C) 2021 ROCKY4546 https://github.com/rocky4546 This file is part of Cabernet Permission is hereby", "without restriction, including without limitation the rights to use, copy, modify, merge, publish,", "merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit", "\"Documentary\": [ tvh_genres['DOCUMENTARY'] ], \"Drama\": [ tvh_genres['MOVIE'] ], \"Educational\": [ tvh_genres['EDUCATIONAL'] ], \"Events", "], \"Business\": [ tvh_genres['NEWS'] ], \"Comedy\": [ tvh_genres['COMEDY'] ], \"Documentary\": [ tvh_genres['DOCUMENTARY'] ],", "& Adventure\": [ tvh_genres['ADVENTURE'] ], \"Business\": [ tvh_genres['NEWS'] ], \"Comedy\": [ tvh_genres['COMEDY'] ],", "], \"Educational\": [ tvh_genres['EDUCATIONAL'] ], \"Events & Specials\": [ tvh_genres['SPORT_SPECIAL'] ], \"Family\": [", "lib.tvheadend.epg_category import groups from lib.tvheadend.epg_category import tvh_genres ustvgo_channels = '<KEY>' ustvgo_png = '<KEY>'", "lib.tvheadend.epg_category import tvh_genres ustvgo_channels = '<KEY>' ustvgo_png = '<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg", "tvh_genres['GAME'] ], \"Health & Lifestyle\": [ tvh_genres['FITNESS'] ], \"Horror\": [ tvh_genres['SF'] ], \"Kids\":", "tvh_genres['SF'] ], \"Kids\": [ tvh_genres['KIDS'] ], \"Music\": [ 
tvh_genres['MUSIC'] ], \"None\": None, \"Other\":", "], \"Game Show\": [ tvh_genres['GAME'] ], \"Health & Lifestyle\": [ tvh_genres['FITNESS'] ], \"Horror\":", "associated documentation files (the \"Software\"), to deal in the Software without restriction, including", "copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and", "\"Food & Cooking\": [ tvh_genres['COOKING'] ], \"Game Show\": [ tvh_genres['GAME'] ], \"Health &", "notice shall be included in all copies or substantial portions of the Software.", "the Software, and to permit persons to whom the Software is furnished to", "\"Fantasy\": [ tvh_genres['SF'] ], \"Food & Cooking\": [ tvh_genres['COOKING'] ], \"Game Show\": [", "} ustvgo_genres = { \"Action & Adventure\": [ tvh_genres['ADVENTURE'] ], \"Business\": [ tvh_genres['NEWS']", "\"Family\": [ tvh_genres['KIDS'] ], \"Fantasy\": [ tvh_genres['SF'] ], \"Food & Cooking\": [ tvh_genres['COOKING']", "sublicense, and/or sell copies of the Software, and to permit persons to whom", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,", "This file is part of Cabernet Permission is hereby granted, free of charge,", "this permission notice shall be included in all copies or substantial portions of", "modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to", "tvh_genres['COOKING'] ], \"Game Show\": [ tvh_genres['GAME'] ], \"Health & Lifestyle\": [ tvh_genres['FITNESS'] ],", "following conditions: The above copyright notice and this permission notice shall be included", "MIT License Copyright (C) 2021 ROCKY4546 https://github.com/rocky4546 This file is part of Cabernet", "[ tvh_genres['TALK_SHOW'] ], \"Tech & Gaming\": [ tvh_genres['TECHNOLOGY'] ], \"Travel\": [ tvh_genres['TRAVEL'] ],", "\"\"\" MIT License Copyright (C) 2021 ROCKY4546 https://github.com/rocky4546 This file is part of", "included in all copies or substantial portions of the Software. 
\"\"\" from lib.tvheadend.epg_category", "\"Talk & Interview\": [ tvh_genres['TALK_SHOW'] ], \"Tech & Gaming\": [ tvh_genres['TECHNOLOGY'] ], \"Travel\":", "groups from lib.tvheadend.epg_category import tvh_genres ustvgo_channels = '<KEY>' ustvgo_png = '<KEY>' ustvgo_stream =", "= { } ustvgo_genres = { \"Action & Adventure\": [ tvh_genres['ADVENTURE'] ], \"Business\":", "distribute, sublicense, and/or sell copies of the Software, and to permit persons to", "\"None\": None, \"Other\": None, \"Pro Sports\": [ tvh_genres['SPORT'] ], \"Reality\": [ tvh_genres['GAME'] ],", "copy of this software and associated documentation files (the \"Software\"), to deal in", "software and associated documentation files (the \"Software\"), to deal in the Software without", "be included in all copies or substantial portions of the Software. \"\"\" from", "Adventure\": [ tvh_genres['ADVENTURE'] ], \"Business\": [ tvh_genres['NEWS'] ], \"Comedy\": [ tvh_genres['COMEDY'] ], \"Documentary\":", "[ tvh_genres['DOCUMENTARY'] ], \"Drama\": [ tvh_genres['MOVIE'] ], \"Educational\": [ tvh_genres['EDUCATIONAL'] ], \"Events &", "Specials\": [ tvh_genres['SPORT_SPECIAL'] ], \"Family\": [ tvh_genres['KIDS'] ], \"Fantasy\": [ tvh_genres['SF'] ], \"Food", "[ tvh_genres['GAME'] ], \"Science\": [ tvh_genres['SCIENCE'] ], \"Science Fiction\": [ tvh_genres['SF'] ], \"Sports\":", "], \"Science\": [ tvh_genres['SCIENCE'] ], \"Science Fiction\": [ tvh_genres['SF'] ], \"Sports\": [ tvh_genres['SPORT']", "tvh_genres['SPORT'] ], \"Suspense\": [ tvh_genres['SF'] ], \"Talk & Interview\": [ tvh_genres['TALK_SHOW'] ], \"Tech", "], \"Family\": [ tvh_genres['KIDS'] ], \"Fantasy\": [ tvh_genres['SF'] ], \"Food & Cooking\": [", "\"\"\" from lib.tvheadend.epg_category import groups from lib.tvheadend.epg_category import tvh_genres ustvgo_channels = '<KEY>' ustvgo_png", "The above copyright notice and this permission notice shall be included in all", "[ tvh_genres['KIDS'] ], \"Music\": [ tvh_genres['MUSIC'] ], \"None\": 
None, \"Other\": None, \"Pro Sports\":", "notice and this permission notice shall be included in all copies or substantial", "], \"Tech & Gaming\": [ tvh_genres['TECHNOLOGY'] ], \"Travel\": [ tvh_genres['TRAVEL'] ], \"Variety Shows\":", "obtaining a copy of this software and associated documentation files (the \"Software\"), to", "], \"Science Fiction\": [ tvh_genres['SF'] ], \"Sports\": [ tvh_genres['SPORT'] ], \"Suspense\": [ tvh_genres['SF']", "tvh_genres['SPORT_SPECIAL'] ], \"Family\": [ tvh_genres['KIDS'] ], \"Fantasy\": [ tvh_genres['SF'] ], \"Food & Cooking\":", "substantial portions of the Software. \"\"\" from lib.tvheadend.epg_category import groups from lib.tvheadend.epg_category import", "ustvgo_channels = '<KEY>' ustvgo_png = '<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>' ustvgo_program", "[ tvh_genres['SF'] ], \"Food & Cooking\": [ tvh_genres['COOKING'] ], \"Game Show\": [ tvh_genres['GAME']", "https://github.com/rocky4546 This file is part of Cabernet Permission is hereby granted, free of", "file is part of Cabernet Permission is hereby granted, free of charge, to", "ustvgo_png = '<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>' ustvgo_program = '<KEY> ustvgo_groups", "portions of the Software. 
\"\"\" from lib.tvheadend.epg_category import groups from lib.tvheadend.epg_category import tvh_genres", "tvh_genres ustvgo_channels = '<KEY>' ustvgo_png = '<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>'", "tvh_genres['KIDS'] ], \"Music\": [ tvh_genres['MUSIC'] ], \"None\": None, \"Other\": None, \"Pro Sports\": [", "charge, to any person obtaining a copy of this software and associated documentation", "ROCKY4546 https://github.com/rocky4546 This file is part of Cabernet Permission is hereby granted, free", "[ tvh_genres['SF'] ], \"Kids\": [ tvh_genres['KIDS'] ], \"Music\": [ tvh_genres['MUSIC'] ], \"None\": None,", "\"Software\"), to deal in the Software without restriction, including without limitation the rights", "], \"Fantasy\": [ tvh_genres['SF'] ], \"Food & Cooking\": [ tvh_genres['COOKING'] ], \"Game Show\":", "ustvgo_program = '<KEY> ustvgo_groups = { } ustvgo_genres = { \"Action & Adventure\":", "\"Business\": [ tvh_genres['NEWS'] ], \"Comedy\": [ tvh_genres['COMEDY'] ], \"Documentary\": [ tvh_genres['DOCUMENTARY'] ], \"Drama\":", "deal in the Software without restriction, including without limitation the rights to use,", "= '<KEY> ustvgo_groups = { } ustvgo_genres = { \"Action & Adventure\": [", "tvh_genres['MUSIC'] ], \"None\": None, \"Other\": None, \"Pro Sports\": [ tvh_genres['SPORT'] ], \"Reality\": [", "granted, free of charge, to any person obtaining a copy of this software", "limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "tvh_genres['ADVENTURE'] ], \"Business\": [ tvh_genres['NEWS'] ], \"Comedy\": [ tvh_genres['COMEDY'] ], \"Documentary\": [ tvh_genres['DOCUMENTARY']", "[ tvh_genres['ADVENTURE'] ], \"Business\": [ tvh_genres['NEWS'] ], \"Comedy\": [ tvh_genres['COMEDY'] ], \"Documentary\": [", "import groups from lib.tvheadend.epg_category import tvh_genres ustvgo_channels = '<KEY>' ustvgo_png = '<KEY>' ustvgo_stream", "is part of Cabernet Permission is hereby 
granted, free of charge, to any", "tvh_genres['EDUCATIONAL'] ], \"Events & Specials\": [ tvh_genres['SPORT_SPECIAL'] ], \"Family\": [ tvh_genres['KIDS'] ], \"Fantasy\":", "the Software is furnished to do so, subject to the following conditions: The", "\"Action & Adventure\": [ tvh_genres['ADVENTURE'] ], \"Business\": [ tvh_genres['NEWS'] ], \"Comedy\": [ tvh_genres['COMEDY']", "part of Cabernet Permission is hereby granted, free of charge, to any person", "\"Events & Specials\": [ tvh_genres['SPORT_SPECIAL'] ], \"Family\": [ tvh_genres['KIDS'] ], \"Fantasy\": [ tvh_genres['SF']", "[ tvh_genres['KIDS'] ], \"Fantasy\": [ tvh_genres['SF'] ], \"Food & Cooking\": [ tvh_genres['COOKING'] ],", "of this software and associated documentation files (the \"Software\"), to deal in the", "publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons", "], \"Drama\": [ tvh_genres['MOVIE'] ], \"Educational\": [ tvh_genres['EDUCATIONAL'] ], \"Events & Specials\": [", "tvh_genres['GAME'] ], \"Science\": [ tvh_genres['SCIENCE'] ], \"Science Fiction\": [ tvh_genres['SF'] ], \"Sports\": [", "including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,", "copies or substantial portions of the Software. \"\"\" from lib.tvheadend.epg_category import groups from", "sell copies of the Software, and to permit persons to whom the Software", "shall be included in all copies or substantial portions of the Software. 
\"\"\"", "[ tvh_genres['EDUCATIONAL'] ], \"Events & Specials\": [ tvh_genres['SPORT_SPECIAL'] ], \"Family\": [ tvh_genres['KIDS'] ],", "of Cabernet Permission is hereby granted, free of charge, to any person obtaining", "persons to whom the Software is furnished to do so, subject to the", "subject to the following conditions: The above copyright notice and this permission notice", "tvh_genres['FITNESS'] ], \"Horror\": [ tvh_genres['SF'] ], \"Kids\": [ tvh_genres['KIDS'] ], \"Music\": [ tvh_genres['MUSIC']", "\"Horror\": [ tvh_genres['SF'] ], \"Kids\": [ tvh_genres['KIDS'] ], \"Music\": [ tvh_genres['MUSIC'] ], \"None\":", "\"Game Show\": [ tvh_genres['GAME'] ], \"Health & Lifestyle\": [ tvh_genres['FITNESS'] ], \"Horror\": [", "Software is furnished to do so, subject to the following conditions: The above", "[ tvh_genres['MUSIC'] ], \"None\": None, \"Other\": None, \"Pro Sports\": [ tvh_genres['SPORT'] ], \"Reality\":", "\"Tech & Gaming\": [ tvh_genres['TECHNOLOGY'] ], \"Travel\": [ tvh_genres['TRAVEL'] ], \"Variety Shows\": [", "], \"Health & Lifestyle\": [ tvh_genres['FITNESS'] ], \"Horror\": [ tvh_genres['SF'] ], \"Kids\": [", "'<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>' ustvgo_program = '<KEY> ustvgo_groups = {", "'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>' ustvgo_program = '<KEY> ustvgo_groups = { } ustvgo_genres =", "= '<KEY>' ustvgo_program = '<KEY> ustvgo_groups = { } ustvgo_genres = { \"Action", "], \"Reality\": [ tvh_genres['GAME'] ], \"Science\": [ tvh_genres['SCIENCE'] ], \"Science Fiction\": [ tvh_genres['SF']", "& Specials\": [ tvh_genres['SPORT_SPECIAL'] ], \"Family\": [ tvh_genres['KIDS'] ], \"Fantasy\": [ tvh_genres['SF'] ],", "do so, subject to the following conditions: The above copyright notice and this", "], \"Comedy\": [ tvh_genres['COMEDY'] ], \"Documentary\": [ tvh_genres['DOCUMENTARY'] ], \"Drama\": [ tvh_genres['MOVIE'] ],", "is hereby granted, free of charge, to any person obtaining 
a copy of", "and associated documentation files (the \"Software\"), to deal in the Software without restriction,", "without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or", "], \"Food & Cooking\": [ tvh_genres['COOKING'] ], \"Game Show\": [ tvh_genres['GAME'] ], \"Health", "\"Educational\": [ tvh_genres['EDUCATIONAL'] ], \"Events & Specials\": [ tvh_genres['SPORT_SPECIAL'] ], \"Family\": [ tvh_genres['KIDS']", "tvh_genres['NEWS'] ], \"Comedy\": [ tvh_genres['COMEDY'] ], \"Documentary\": [ tvh_genres['DOCUMENTARY'] ], \"Drama\": [ tvh_genres['MOVIE']", "to deal in the Software without restriction, including without limitation the rights to", "[ tvh_genres['SF'] ], \"Sports\": [ tvh_genres['SPORT'] ], \"Suspense\": [ tvh_genres['SF'] ], \"Talk &", "<reponame>cookieisland/cabernet # pylama:ignore=E203,E221 \"\"\" MIT License Copyright (C) 2021 ROCKY4546 https://github.com/rocky4546 This file", "# pylama:ignore=E203,E221 \"\"\" MIT License Copyright (C) 2021 ROCKY4546 https://github.com/rocky4546 This file is", "is furnished to do so, subject to the following conditions: The above copyright", "], \"Events & Specials\": [ tvh_genres['SPORT_SPECIAL'] ], \"Family\": [ tvh_genres['KIDS'] ], \"Fantasy\": [", "\"Other\": None, \"Pro Sports\": [ tvh_genres['SPORT'] ], \"Reality\": [ tvh_genres['GAME'] ], \"Science\": [", "hereby granted, free of charge, to any person obtaining a copy of this", "to whom the Software is furnished to do so, subject to the following", "Cooking\": [ tvh_genres['COOKING'] ], \"Game Show\": [ tvh_genres['GAME'] ], \"Health & Lifestyle\": [", "], \"Music\": [ tvh_genres['MUSIC'] ], \"None\": None, \"Other\": None, \"Pro Sports\": [ tvh_genres['SPORT']", "], \"Sports\": [ tvh_genres['SPORT'] ], \"Suspense\": [ tvh_genres['SF'] ], \"Talk & Interview\": [", "documentation files (the \"Software\"), to deal in the Software without restriction, including without", "files (the \"Software\"), to deal in the 
Software without restriction, including without limitation", "[ tvh_genres['SPORT_SPECIAL'] ], \"Family\": [ tvh_genres['KIDS'] ], \"Fantasy\": [ tvh_genres['SF'] ], \"Food &", "[ tvh_genres['COOKING'] ], \"Game Show\": [ tvh_genres['GAME'] ], \"Health & Lifestyle\": [ tvh_genres['FITNESS']", "tvh_genres['SCIENCE'] ], \"Science Fiction\": [ tvh_genres['SF'] ], \"Sports\": [ tvh_genres['SPORT'] ], \"Suspense\": [", "Software without restriction, including without limitation the rights to use, copy, modify, merge,", "so, subject to the following conditions: The above copyright notice and this permission", "restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute,", "& Cooking\": [ tvh_genres['COOKING'] ], \"Game Show\": [ tvh_genres['GAME'] ], \"Health & Lifestyle\":", "to do so, subject to the following conditions: The above copyright notice and", "Cabernet Permission is hereby granted, free of charge, to any person obtaining a", "tvh_genres['SF'] ], \"Sports\": [ tvh_genres['SPORT'] ], \"Suspense\": [ tvh_genres['SF'] ], \"Talk & Interview\":", "tvh_genres['SPORT'] ], \"Reality\": [ tvh_genres['GAME'] ], \"Science\": [ tvh_genres['SCIENCE'] ], \"Science Fiction\": [", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the", "to the following conditions: The above copyright notice and this permission notice shall", "= { \"Action & Adventure\": [ tvh_genres['ADVENTURE'] ], \"Business\": [ tvh_genres['NEWS'] ], \"Comedy\":", "Sports\": [ tvh_genres['SPORT'] ], \"Reality\": [ tvh_genres['GAME'] ], \"Science\": [ tvh_genres['SCIENCE'] ], \"Science", "= 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>' ustvgo_program = '<KEY> ustvgo_groups = { } ustvgo_genres", "\"Science Fiction\": [ tvh_genres['SF'] ], \"Sports\": [ tvh_genres['SPORT'] ], \"Suspense\": [ tvh_genres['SF'] ],", "Software, and to permit persons to whom the Software is furnished to do", "& Gaming\": [ 
tvh_genres['TECHNOLOGY'] ], \"Travel\": [ tvh_genres['TRAVEL'] ], \"Variety Shows\": [ tvh_genres['VARIETY']", "in the Software without restriction, including without limitation the rights to use, copy,", "[ tvh_genres['SPORT'] ], \"Suspense\": [ tvh_genres['SF'] ], \"Talk & Interview\": [ tvh_genres['TALK_SHOW'] ],", "or substantial portions of the Software. \"\"\" from lib.tvheadend.epg_category import groups from lib.tvheadend.epg_category", "ustvgo_epg = '<KEY>' ustvgo_program = '<KEY> ustvgo_groups = { } ustvgo_genres = {", "\"Pro Sports\": [ tvh_genres['SPORT'] ], \"Reality\": [ tvh_genres['GAME'] ], \"Science\": [ tvh_genres['SCIENCE'] ],", "tvh_genres['MOVIE'] ], \"Educational\": [ tvh_genres['EDUCATIONAL'] ], \"Events & Specials\": [ tvh_genres['SPORT_SPECIAL'] ], \"Family\":", "[ tvh_genres['SPORT'] ], \"Reality\": [ tvh_genres['GAME'] ], \"Science\": [ tvh_genres['SCIENCE'] ], \"Science Fiction\":", "], \"Talk & Interview\": [ tvh_genres['TALK_SHOW'] ], \"Tech & Gaming\": [ tvh_genres['TECHNOLOGY'] ],", "in all copies or substantial portions of the Software. 
\"\"\" from lib.tvheadend.epg_category import", "\"Reality\": [ tvh_genres['GAME'] ], \"Science\": [ tvh_genres['SCIENCE'] ], \"Science Fiction\": [ tvh_genres['SF'] ],", "tvh_genres['SF'] ], \"Food & Cooking\": [ tvh_genres['COOKING'] ], \"Game Show\": [ tvh_genres['GAME'] ],", "of the Software, and to permit persons to whom the Software is furnished", "\"Music\": [ tvh_genres['MUSIC'] ], \"None\": None, \"Other\": None, \"Pro Sports\": [ tvh_genres['SPORT'] ],", "License Copyright (C) 2021 ROCKY4546 https://github.com/rocky4546 This file is part of Cabernet Permission", "[ tvh_genres['SF'] ], \"Talk & Interview\": [ tvh_genres['TALK_SHOW'] ], \"Tech & Gaming\": [", "and/or sell copies of the Software, and to permit persons to whom the", "to any person obtaining a copy of this software and associated documentation files", "\"Drama\": [ tvh_genres['MOVIE'] ], \"Educational\": [ tvh_genres['EDUCATIONAL'] ], \"Events & Specials\": [ tvh_genres['SPORT_SPECIAL']", "above copyright notice and this permission notice shall be included in all copies", "= '<KEY>' ustvgo_stream = 'gfpMXf5BjIUCXRpNtNHFzfkQtxdMkqSGgfW=' ustvgo_epg = '<KEY>' ustvgo_program = '<KEY> ustvgo_groups =", "Software. \"\"\" from lib.tvheadend.epg_category import groups from lib.tvheadend.epg_category import tvh_genres ustvgo_channels = '<KEY>'", "of charge, to any person obtaining a copy of this software and associated", "(the \"Software\"), to deal in the Software without restriction, including without limitation the", "Fiction\": [ tvh_genres['SF'] ], \"Sports\": [ tvh_genres['SPORT'] ], \"Suspense\": [ tvh_genres['SF'] ], \"Talk", "\"Health & Lifestyle\": [ tvh_genres['FITNESS'] ], \"Horror\": [ tvh_genres['SF'] ], \"Kids\": [ tvh_genres['KIDS']", "{ } ustvgo_genres = { \"Action & Adventure\": [ tvh_genres['ADVENTURE'] ], \"Business\": [", "Gaming\": [ tvh_genres['TECHNOLOGY'] ], \"Travel\": [ tvh_genres['TRAVEL'] ], \"Variety Shows\": [ tvh_genres['VARIETY'] ]" ]
[ "= MyCompleter([file for file in os_test.listdir(f'/home/{getpass.getuser()}') if not file.startswith('.')]) readline.set_completer(completer.complete) readline.parse_and_bind('tab: complete') input(\"Input:", "class MyCompleter: def __init__(self, options): self.options = sorted(options) def complete(self, text, state): if", "try: return self.matches[state] except IndexError: return None completer = MyCompleter([file for file in", "self.matches[state] except IndexError: return None completer = MyCompleter([file for file in os_test.listdir(f'/home/{getpass.getuser()}') if", "self.matches = [s for s in self.options if s and s.startswith(text)] else: self.matches", "def complete(self, text, state): if state == 0: if text: self.matches = [s", "MyCompleter: def __init__(self, options): self.options = sorted(options) def complete(self, text, state): if state", "import os_test import readline class MyCompleter: def __init__(self, options): self.options = sorted(options) def", "return self.matches[state] except IndexError: return None completer = MyCompleter([file for file in os_test.listdir(f'/home/{getpass.getuser()}')", "sorted(options) def complete(self, text, state): if state == 0: if text: self.matches =", "if state == 0: if text: self.matches = [s for s in self.options", "self.options if s and s.startswith(text)] else: self.matches = self.options[:] try: return self.matches[state] except", "MyCompleter([file for file in os_test.listdir(f'/home/{getpass.getuser()}') if not file.startswith('.')]) readline.set_completer(completer.complete) readline.parse_and_bind('tab: complete') input(\"Input: \")", "state): if state == 0: if text: self.matches = [s for s in", "if text: self.matches = [s for s in self.options if s and s.startswith(text)]", "import getpass import os_test import readline class MyCompleter: def __init__(self, options): self.options =", "and s.startswith(text)] else: self.matches = self.options[:] try: return self.matches[state] except IndexError: 
return None", "None completer = MyCompleter([file for file in os_test.listdir(f'/home/{getpass.getuser()}') if not file.startswith('.')]) readline.set_completer(completer.complete) readline.parse_and_bind('tab:", "complete(self, text, state): if state == 0: if text: self.matches = [s for", "[s for s in self.options if s and s.startswith(text)] else: self.matches = self.options[:]", "== 0: if text: self.matches = [s for s in self.options if s", "__init__(self, options): self.options = sorted(options) def complete(self, text, state): if state == 0:", "getpass import os_test import readline class MyCompleter: def __init__(self, options): self.options = sorted(options)", "s.startswith(text)] else: self.matches = self.options[:] try: return self.matches[state] except IndexError: return None completer", "0: if text: self.matches = [s for s in self.options if s and", "completer = MyCompleter([file for file in os_test.listdir(f'/home/{getpass.getuser()}') if not file.startswith('.')]) readline.set_completer(completer.complete) readline.parse_and_bind('tab: complete')", "self.options[:] try: return self.matches[state] except IndexError: return None completer = MyCompleter([file for file", "self.matches = self.options[:] try: return self.matches[state] except IndexError: return None completer = MyCompleter([file", "= self.options[:] try: return self.matches[state] except IndexError: return None completer = MyCompleter([file for", "= sorted(options) def complete(self, text, state): if state == 0: if text: self.matches", "text: self.matches = [s for s in self.options if s and s.startswith(text)] else:", "state == 0: if text: self.matches = [s for s in self.options if", "self.options = sorted(options) def complete(self, text, state): if state == 0: if text:", "= [s for s in self.options if s and s.startswith(text)] else: self.matches =", "for s in self.options if s and s.startswith(text)] else: self.matches = self.options[:] try:", "IndexError: return None completer = 
MyCompleter([file for file in os_test.listdir(f'/home/{getpass.getuser()}') if not file.startswith('.')])", "s in self.options if s and s.startswith(text)] else: self.matches = self.options[:] try: return", "return None completer = MyCompleter([file for file in os_test.listdir(f'/home/{getpass.getuser()}') if not file.startswith('.')]) readline.set_completer(completer.complete)", "import readline class MyCompleter: def __init__(self, options): self.options = sorted(options) def complete(self, text,", "s and s.startswith(text)] else: self.matches = self.options[:] try: return self.matches[state] except IndexError: return", "if s and s.startswith(text)] else: self.matches = self.options[:] try: return self.matches[state] except IndexError:", "except IndexError: return None completer = MyCompleter([file for file in os_test.listdir(f'/home/{getpass.getuser()}') if not", "options): self.options = sorted(options) def complete(self, text, state): if state == 0: if", "<reponame>mrHola21/Eterm<gh_stars>10-100 import getpass import os_test import readline class MyCompleter: def __init__(self, options): self.options", "readline class MyCompleter: def __init__(self, options): self.options = sorted(options) def complete(self, text, state):", "in self.options if s and s.startswith(text)] else: self.matches = self.options[:] try: return self.matches[state]", "def __init__(self, options): self.options = sorted(options) def complete(self, text, state): if state ==", "os_test import readline class MyCompleter: def __init__(self, options): self.options = sorted(options) def complete(self,", "else: self.matches = self.options[:] try: return self.matches[state] except IndexError: return None completer =", "text, state): if state == 0: if text: self.matches = [s for s" ]
[ "столкновения из треугольников. for poly in triangles: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad", "показывать полигон столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0) def geom_node_create(obj, scene): geom =", "3: for index in poly.vertices[2:]: triangles.append(poly) # Если у полигона четыре вершины, необходимо", "PerspectiveLens, OrthographicLens, CS_default, CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid from panda3d.core import GeomVertexArrayFormat, Geom,", "GeomVertexFormat.registerFormat(my_format) return end_format, color, texcoord def geom_create(obj): geom_vertex_format = get_format(obj) color = geom_vertex_format[1]", "bam_writer_file(path_save, root) show_message_box('Export object: {} completed, time: {}'.format(obj.name, datetime.now() - start_time), \"Message\") return", "полигоны столкновения из многоугольников. for name in named_not_quad: # Нужно разбить многоугольники на", "bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj): trangle = {} triangulator3 = Triangulator3() index_tr =", "Проверяем существует ли директория, если нет то создаем. 
if not os.path.exists(path_project_save): try: os.makedirs(path_project_save)", "bl_label = \"Checking_coplanarity\" def execute(self, context): select_not_coplanar(context.object) return {'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname =", "panda3d.core import CollisionPolygon, CollisionNode import bpy import bmesh from mathutils.geometry import distance_point_to_plane ostream", "2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2))", "= GeomVertexWriter(vdata, 'vertex') normal_vertex = GeomVertexWriter(vdata, 'normal') # Если используются цвета вершин. if", "CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) for collision_node in collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'),", "None # Обработка второй вершины. if not triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0],", "ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export object: {} completed, time: {}'.format(obj.name, datetime.now() - start_time),", "вершины. 
if not triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth:", "Geom.C_normal) # Проверка есть ли цвета вершин у объекта. if obj.data.vertex_colors.active: color =", "== 3: status = True elif len(poly.vertices) >= 3: v1 = obj.data.vertices[poly.vertices[1]].co -", "же создаем дополнительные слои. for uv in obj.data.uv_layers: # Если имя не совподает", "triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0],", "GeomVertexWriter(vdata, 'vertex') normal_vertex = GeomVertexWriter(vdata, 'normal') # Если используются цвета вершин. if color:", "in context.selected_objects: # Объединяем путь проекта и относительную директорию модели. path_project_save = os.path.join(context.scene.hatcher.ful_path_project,", "obj) else: add_polygons_to_dict(named_not_coplanar, poly, obj) # Если у полигона более четырех вершин, необходимо", "path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) else: node = build_hierarchy(obj, context.scene) # Объединяем", "объединения. root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список выбранных объектов. for obj in context.selected_objects:", "lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name) camera.active = obj.hatcher.camera_active bit", "= GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем формат. end_format = GeomVertexFormat.registerFormat(my_format) return end_format, color, texcoord", "совподает с активным. 
if not uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord) #", "== obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем порядок треугольников. prim = GeomTriangles(Geom.UHStatic)", "# Запишем порядок треугольников. prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh = obj.data mesh.calc_loop_triangles()", "Добавляем вершины в примитив. prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom = Geom(vdata) geom.addPrimitive(prim) return", "прямольников. for name in named_not_coplanar: # Нужно разбить некомпланарные полигоны, на треугольники. for", "NodePath, ModelRoot from panda3d.core import BamFile, BamWriter, Filename, Notify from panda3d.core import CollisionPolygon,", "{'texcoord': GeomVertexWriter(vdata, 'texcoord')} # Так же создаем дополнительные слои. for uv in obj.data.uv_layers:", "полигоны столкновения из треугольников. for poly in triangles: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co))", "= [] for poly in obj.data.polygons: if not check_coplanar(obj, poly): not_coplanar.append(poly) for i", "{'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\" bl_label = \"Generator_scene\" def execute(self, context): start_time", "np.set_transform(root, conversion_transform(obj)) # Проходим по детям. 
for child in obj.children: recurse(child, obj) recurse(obj,", "False, 'ORTHO': False, 'CAMERA':True} def show_message_box(message = \"\", title = \"Message Box\", icon", "obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) # Если полигон столкновения", "geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal) # Проверка есть ли цвета", "frame_size = obj.data.view_frame(scene = scene) if obj.data.type == 'PERSP': lens = PerspectiveLens() if", "geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color) # Проверка есть ли активные текстурные координаты у объекта.", "end_format = GeomVertexFormat.registerFormat(my_format) return end_format, color, texcoord def geom_create(obj): geom_vertex_format = get_format(obj) color", "материала и он содержит имя, рассортировываем их по словарям под этим именем. if", "Проверка есть ли цвета вершин у объекта. if obj.data.vertex_colors.active: color = True #", "# Пройдем по всем объектом в сцене. for obj in context.scene.objects: # Нас", "else: node = build_hierarchy(obj, context.scene) # Объединяем путь директории и имя файла. 
path_save", "obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None # Добавляем вершины в", "есть слот материала и он содержит имя, рассортировываем их по словарям под этим", "color_vertex_list[name].set_row(triangle.loops[2]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2],", "v1 = obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for index in", "объеденяем в один файл. if not context.scene.hatcher.file_name_selected == '': # Создаем корень для", "in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else:", "которых более четырех сторон на треугольники. 
for poly in not_quad: for vertext in", "context): start_time = datetime.now() context.view_layer.update() # Перебираем список выбранных объектов. for obj in", "root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export object: {} completed, time: {}'.format(obj.name, datetime.now() - start_time), \"Message\")", "triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2])", "for uv in obj.data.uv_layers: # Если имя не совподает с активным. if not", "writer.flush() file.close() def conversion_transform(obj): pos = Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale())", "abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system == \"CS_zup_right\":", "Если полигон компланарный if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly, obj) else: add_polygons_to_dict(named_not_coplanar, poly, obj)", "# Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name) node =", "== obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord) # Создаем формат. my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format)", "массив. geom_vertex_format = GeomVertexArrayFormat() # Создаем колонку для вершин. 
geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point)", "obj.type == \"LIGHT\": create_object = \"LIGHT\" # Если объект является камерой. if obj.type", "# Создаем полигоны столкновения из компланарных прямольников. for name in named_coplanar: for poly", "into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path", "obj): trangle = {} triangulator3 = Triangulator3() index_tr = 0 for index in", "color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2],", "имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) else: node = build_hierarchy(obj, context.scene)", "geom_node_create(obj, scene): geom = geom_create(obj) geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node def camera_create(obj,", "= NodePath(\"root\") # Выполним рекурсию, для поиска всех. def recurse(obj, parent): # Переменая", "# Создаем новый массив. geom_vertex_format = GeomVertexArrayFormat() # Создаем колонку для вершин. 
geom_vertex_format.add_column(\"vertex\",", "triangle in mesh.loop_triangles: # Обработка первой вершины. if not triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0])", "not_coplanar.append(poly) for i in obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False for i", "полигона более четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad,", "result = root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) #", "get_format(obj): color = False texcoord = False # Создаем новый массив. geom_vertex_format =", "index_tr += 1 triangulator3.triangulate() for i in range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 =", "ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по всем объектом в сцене. for obj in context.scene.objects: #", "порядок треугольников. prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh = obj.data mesh.calc_loop_triangles() # Сюда", "in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'),", "# Создаем колонку для координат c именем по умолчанию. geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord)", "for child in obj.children: recurse(child, obj) recurse(obj, obj.parent) return root.node().getChild(0) import os from", "является камерой. 
if obj.type == \"CAMERA\": if obj.data.type != 'PANO': create_object = camera_create", "panda3d.core import BamFile, BamWriter, Filename, Notify from panda3d.core import CollisionPolygon, CollisionNode import bpy", "triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1 triangulator3.triangulate() for i in range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i))", "obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name", "False, 'PERSP': False, 'ORTHO': False, 'CAMERA':True} def show_message_box(message = \"\", title = \"Message", "obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2])", "bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly): status = False # Если вершины три, это", "col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color) # Проверка есть ли активные текстурные", "obj.data.clip_end) if obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system", "if obj.type in list_object_support: # Если есть ли подтип. if list_object_support[obj.type]: if not", "три, это значит полигон автоматически копланарен. 
if len(poly.vertices) == 3: status = True", "Создаем полигоны столкновения из компланарных прямольников. for poly in coplanar: for index in", "это значит полигон автоматически копланарен. if len(poly.vertices) == 3: status = True elif", "с активным. if not col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если", "os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) #", "= ((v0[0], v0[1], v0[2]), (v1[0], v1[1], v1[2]), (v2[0], v2[1], v2[2])) return trangle def", "i.select=False for i in obj.data.polygons: i.select = False for poly in not_coplanar: poly.select", "== 3: for index in poly.vertices[2:]: triangles.append(poly) # Если у полигона четыре вершины,", "obj in context.selected_objects: # Проверим есть ли данный тип объекта среди поддерживаемых. if", "NodePath(collision_node) node_path.reparentTo(group) # Если стоит флажок показывать полигон столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show() return", "panda3d.core import GeomNode, PandaNode, NodePath, ModelRoot from panda3d.core import BamFile, BamWriter, Filename, Notify", "# Нужно разбить многоугольники на треугольники. for poly in named_not_quad[name]: for vertext in", "{} named_not_coplanar = {} named_not_quad = {} triangles = [] coplanar = []", "vertext_quad[1], vertext_quad[2]), name) vertext_quad = [] # Создаем полигоны столкновения из компланарных прямольников.", "директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем существует ли директория, если нет", "# Если объект является источником цвета. if obj.type == \"LIGHT\": create_object = \"LIGHT\"", "полигон из трех вершин, проверка на компланарность не нужна. 
if len(poly.vertices) == 3:", "vertext_quad[3]), name) vertext_quad = [] # Создаем полигоны столкновения из некомпланарных прямольников. for", "Camera(obj.data.name) camera.active = obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'),", "triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'),", "поиска всех. def recurse(obj, parent): # Переменая которая содережит функцию необходимую для экспорта", "= \"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\" def execute(self, context): select_not_coplanar(context.object) return {'FINISHED'} class CheckingQuad(bpy.types.Operator):", "obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None # Обработка", "Если используются цвета вершин. 
if color: color_vertex_list = {'color': GeomVertexWriter(vdata, 'color')} # Так", "1 triangulator3.triangulate() for i in range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2", "Создаем полигоны столкновения из треугольников. for name in named_triangles: for poly in named_triangles[name]:", "только без родителя. if not obj.parent: # Проверим есть ли данный тип объекта", "рассортировываем по спискам else: # Если полигон из трех вершин, проверка на компланарность", "'texcoord')} # Так же создаем дополнительные слои. for uv in obj.data.uv_layers: # Если", "родителя. if not obj.parent: # Проверим есть ли данный тип объекта среди поддерживаемых.", "time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\" bl_label", "build_hierarchy(obj, scene): # Узел для формирование иерархии root = NodePath(\"root\") # Выполним рекурсию,", "obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None # Обработка третьей вершины. if not triangle.loops[2] in", "triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom = Geom(vdata) geom.addPrimitive(prim) return geom def select_not_quad(obj): not_quad =", "completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\"", "not_coplanar: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj): trangle = {} triangulator3", "Если объект является сеткой. if obj.type == \"MESH\": if obj.hatcher.type_mesh == \"Render\": create_object", "def add_polygons_to_dict(dict_named, poly, obj): # Если нет такого ключа в словаре. 
if not", "obj.data.edges: i.select=False for i in obj.data.polygons: i.select = False for poly in not_quad:", "создаем дополнительные слои. for col in obj.data.vertex_colors: # Если имя не совподает с", "координаты текстур. if texcoord: texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata, 'texcoord')} # Так же создаем", "if obj.hatcher.type_mesh == \"Collision\": create_object = collision_polygon_create # Если объект является источником цвета.", "index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad = []", "texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if", "obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) # Если полигон столкновения содержит", "return root.node().getChild(0) import os from datetime import datetime class ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\"", "нет то создаем. if not os.path.exists(path_project_save): try: os.makedirs(path_project_save) except OSError as error: #print(error)", "def camera_create(obj, scene): frame_size = obj.data.view_frame(scene = scene) if obj.data.type == 'PERSP': lens", "\"Message Box\", icon = 'INFO'): def draw(self, context): self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw, title", "по всем объектом в сцене. 
for obj in context.scene.objects: # Нас интересуют объекты", "for poly in named_triangles[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]),", "len(poly.vertices) >= 4: not_quad.append(poly) else: # Если полигон из трех вершин, проверка на", "if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly, obj) else: add_polygons_to_dict(named_not_coplanar, poly, obj) # Если у", "in triangles: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad)", "прямольников. for name in named_coplanar: for poly in named_coplanar[name]: for index in poly.vertices:", "in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1 triangulator3.triangulate() for i in range(triangulator3.getNumTriangles()): v0", "4, Geom.NT_uint8, Geom.C_color) # Так же создаем дополнительные колонки. for col in obj.data.vertex_colors:", "color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2],", "color, texcoord def geom_create(obj): geom_vertex_format = get_format(obj) color = geom_vertex_format[1] texcoord = geom_vertex_format[2]", "[] # Создаем полигоны столкновения из компланарных прямольников. for name in named_coplanar: for", "= False # Создаем новый массив. geom_vertex_format = GeomVertexArrayFormat() # Создаем колонку для", "Если используются координаты текстур. 
if texcoord: texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata, 'texcoord')} # Так", "треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) else: # Если полигон из трех вершин,", "[] coplanar = [] not_coplanar = [] not_quad = [] # Перебираем полигоны", "geom = Geom(vdata) geom.addPrimitive(prim) return geom def select_not_quad(obj): not_quad = [] for poly", "{}'.format(obj.name, datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\" bl_label", "Если есть слот материала и он содержит имя, рассортировываем их по словарям под", "Создаем полигоны столкновения из некомпланарных прямольников. for name in named_not_coplanar: # Нужно разбить", "bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly): status = False # Если вершины три, это значит", "def triangle_poly(poly, obj): trangle = {} triangulator3 = Triangulator3() index_tr = 0 for", "директория, если нет то создаем. checkcreate_dirs(path_project_save) # Если поле имени файла заполнено, то", "колонки. for uv in obj.data.uv_layers: # Если имя не совподает с активным. if", "объединения. root = ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по всем объектом в сцене. for obj", "именем по умолчанию. geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color) # Так же создаем дополнительные колонки.", "path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) # Если нет, то раздельно. 
else: #", "'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2])", "Если стоит флажок показывать полигон столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0) def geom_node_create(obj,", "vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name) vertext_quad = [] # Создаем полигоны столкновения", "obj.data.uv_layers: # Если имя не совподает с активным. if not uv.name == obj.data.uv_layers.active.name:", "obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'),", "obj.hatcher.type_mesh == \"Render\": create_object = geom_node_create if obj.hatcher.type_mesh == \"Collision\": create_object = collision_polygon_create", "obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], 
obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None # Обработка третьей вершины. if not triangle.loops[2]", "for i in obj.data.polygons: i.select = False for poly in not_coplanar: poly.select =", "проверить на компланарность. elif len(poly.vertices) == 4: if check_coplanar(obj, poly): coplanar.append(poly) else: not_coplanar.append(poly)", "obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None # Обработка третьей вершины.", "четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad, poly, obj)", "build_hierarchy(obj, context.scene) # Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name)", "False # Если вершины три, это значит полигон автоматически копланарен. if len(poly.vertices) ==", "он содержит имя, рассортировываем их по словарям под этим именем. if hasattr(obj.data.materials[poly.material_index], 'name'):", "Проходим по детям. for child in obj.children: recurse(child, obj) recurse(obj, obj.parent) return root.node().getChild(0)", "for poly in not_quad: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1],", "GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если используются координаты текстур. 
if texcoord: texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata,", "not_quad = [] for poly in obj.data.polygons: if len(poly.vertices) >= 5: not_quad.append(poly) for", "texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color:", "= geom_node_create if obj.hatcher.type_mesh == \"Collision\": create_object = collision_polygon_create # Если объект является", "trangle def add_polygons_to_dict(dict_named, poly, obj): # Если нет такого ключа в словаре. if", "os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) #", "раздельно. else: # Перебираем список выбранных объектов. for obj in context.selected_objects: # Проверим", "context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) # Если нет, то раздельно. else: # Перебираем список выбранных", "obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit, 2) camera.set_lens(lens) return camera def", "poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1 triangulator3.triangulate() for i in range(triangulator3.getNumTriangles()): v0 =", "для экспорта данного типа объекта. 
create_object = None # Если объект является сеткой.", "try: os.makedirs(path_project_save) except OSError as error: #print(error) pass def bam_writer_file(path_save, obj): file =", "int(bit, 2) camera.set_lens(lens) return camera def build_hierarchy(obj, scene): # Узел для формирование иерархии", "color_vertex_list[name].set_row(triangle.loops[1]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2],", "if check_coplanar(obj, poly): coplanar.append(poly) else: not_coplanar.append(poly) # Если у полигона более четырех вершин,", "check_coplanar(obj, poly): coplanar.append(poly) else: not_coplanar.append(poly) # Если у полигона более четырех вершин, необходимо", "путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) show_message_box('Export selected,", "def check_coplanar(obj, poly): status = False # Если вершины три, это значит полигон", "проекта и относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем существует ли", "треугольники. for poly in not_coplanar: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0],", "in poly.vertices[2:]: triangles.append(poly) # Если у полигона четыре вершины, необходимо проверить на компланарность.", "вершин, необходимо разбить на треугольники. 
elif len(poly.vertices) >= 4: not_quad.append(poly) ######################## ######################## group", "Geom.NT_uint8, Geom.C_color) # Проверка есть ли активные текстурные координаты у объекта. if obj.data.uv_layers.active:", "in obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False for i in obj.data.polygons: i.select", "создаем. checkcreate_dirs(path_project_save) # Если поле имени файла заполнено, то объеденяем в один файл.", "директории и имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) show_message_box('Export selected, completed,", "triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in", "start_time), \"Message\") return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\" def", "triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name == 'texcoord':", "= file.getWriter() writer.writeObject(obj) writer.flush() file.close() def conversion_transform(obj): pos = Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion())", "not_quad.append(poly) for i in obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False for i", "my_format.addArray(geom_vertex_format) # Регистрируем формат. 
end_format = GeomVertexFormat.registerFormat(my_format) return end_format, color, texcoord def geom_create(obj):", "return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\" def execute(self, context):", "node = build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и имя файла. path_save", "Triangulator3, GeomTriangles from panda3d.core import GeomNode, PandaNode, NodePath, ModelRoot from panda3d.core import BamFile,", "obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system == \"CS_yup_left\":", "obj) # Если нет материала, то рассортировываем по спискам else: # Если полигон", "нет то создаем. checkcreate_dirs(path_project_save) # Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.name)) #", "= os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save)", "= os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) # Если нет, то раздельно. else: # Перебираем", "triangle_poly(poly, obj): trangle = {} triangulator3 = Triangulator3() index_tr = 0 for index", "conversion_transform(obj)) else: # Если нет родителя. 
np = NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show() #", ">= 1: # Если есть слот материала и он содержит имя, рассортировываем их", "obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for", "для поиска всех. def recurse(obj, parent): # Переменая которая содережит функцию необходимую для", "name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1])", "если нет то создаем. checkcreate_dirs(path_project_save) # Если поле имени файла заполнено, то объеденяем", "слои. for col in obj.data.vertex_colors: # Если имя не совподает с активным. 
if", "geom.addPrimitive(prim) return geom def select_not_quad(obj): not_quad = [] for poly in obj.data.polygons: if", "time: {}'.format(obj.name, datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\"", "== \"CAMERA\": if obj.data.type != 'PANO': create_object = camera_create # Если есть родитель.", "panda3d.core import GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles from panda3d.core import GeomNode,", "color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1],", "add_polygons_to_dict(dict_named, poly, obj): # Если нет такого ключа в словаре. if not obj.data.materials[poly.material_index].name", "= {'MESH': False, 'PERSP': False, 'ORTHO': False, 'CAMERA':True} def show_message_box(message = \"\", title", "модели. 
path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем существует ли директория, если нет то", "vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1],", "os.makedirs(path_project_save) except OSError as error: #print(error) pass def bam_writer_file(path_save, obj): file = BamFile()", "obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None # Обработка", "# Если есть слот материала и он содержит имя, рассортировываем их по словарям", "у объекта. if obj.data.uv_layers.active: texcoord = True # Создаем колонку для координат c", "else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name", "obj.data.polygons: if len(poly.vertices) >= 5: not_quad.append(poly) for i in obj.data.vertices: i.select=False for i", "компланарных прямольников. 
for poly in coplanar: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad =", "Если у полигона четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices) == 4:", "рассортировываем их по словарям под этим именем. if hasattr(obj.data.materials[poly.material_index], 'name'): # Если полигон", "name): if name in collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node = CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] =", "не совподает с активным. if not uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name))", "else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None # Обработка третьей вершины. 
if", "if not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) root.add_child(node) else: node =", "== 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] =", "for name in named_coplanar: for poly in named_coplanar[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co))", "[] not_coplanar = [] not_quad = [] # Перебираем полигоны объекта. for poly", "= CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'),", "'ORTHO': False, 'CAMERA':True} def show_message_box(message = \"\", title = \"Message Box\", icon =", "[poly] else: # Если есть такой ключ, добавляем к списку. 
dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict,", "lens = PerspectiveLens() if obj.data.type == 'ORTHO': lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]),", "else: collision_node = CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] = collision_node def collision_polygon_create(obj, scene): named_triangles =", "obj.hatcher.type_mesh == \"Collision\": create_object = collision_polygon_create # Если объект является источником цвета. if", "if not os.path.exists(path_project_save): try: os.makedirs(path_project_save) except OSError as error: #print(error) pass def bam_writer_file(path_save,", "for obj in context.selected_objects: # Объединяем путь проекта и относительную директорию модели. path_project_save", "такого ключа в словаре. if not obj.data.materials[poly.material_index].name in dict_named: # Дабавляем ключ и", "abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default) if", "obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None # Обработка третьей вершины. if not triangle.loops[2] in list_vertext:", "# Нужно разбить некомпланарные полигоны, на треугольники. for poly in named_not_coplanar[name]: for vertext", "if obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system ==", "Если имя не совподает с активным. 
if not col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4,", "node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node = CollisionNode(obj.name) # Создаем полигоны столкновения из треугольников.", "\"CAMERA\": if obj.data.type != 'PANO': create_object = camera_create # Если есть родитель. if", "c именем по умолчанию. geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord) # Так же создаем дополнительные", "путь директории и имя сцены. path_save = os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root) show_message_box('Export scene,", "# Перебираем список выбранных объектов. for obj in context.selected_objects: # Проверим есть ли", "Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos, quat, scale) return transform def get_format(obj): color = False", "obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for index in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6:", "in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad = [] #", "2, Geom.NT_float32, Geom.C_texcoord) # Создаем формат. my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем формат.", "prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh = obj.data mesh.calc_loop_triangles() # Сюда записиваются индексы", "root.add_child(node) # Объединяем путь директории и имя файла. 
path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save,", "== 'PERSP': lens = PerspectiveLens() if obj.data.type == 'ORTHO': lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0])", "спискам else: # Если полигон из трех вершин, проверка на компланарность не нужна.", "= triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0], v0[1], v0[2]), (v1[0], v1[1], v1[2]), (v2[0], v2[1], v2[2]))", "obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'),", "многоугольники на треугольники. for poly in named_not_quad[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict,", "Нужно разбить полигоны у которых более четырех сторон на треугольники. for poly in", "Camera, PerspectiveLens, OrthographicLens, CS_default, CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid from panda3d.core import GeomVertexArrayFormat,", "в словаре. if not obj.data.materials[poly.material_index].name in dict_named: # Дабавляем ключ и список. dict_named[obj.data.materials[poly.material_index].name]", "if not obj.data.materials[poly.material_index].name in dict_named: # Дабавляем ключ и список. dict_named[obj.data.materials[poly.material_index].name] = [poly]", "прямольников. for poly in coplanar: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0],", "vertext_quad[2], vertext_quad[3]), name) vertext_quad = [] # Создаем полигоны столкновения из некомпланарных прямольников.", "объектов. 
for obj in context.selected_objects: # Объединяем путь проекта и относительную директорию модели.", "execute(self, context): select_not_coplanar(context.object) return {'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\" bl_label = \"Checking_quad\"", "GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh = obj.data mesh.calc_loop_triangles() # Сюда записиваются индексы обработаных вершин.", "v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0], v0[1], v0[2]), (v1[0], v1[1], v1[2]), (v2[0], v2[1],", "geom_create(obj) geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node def camera_create(obj, scene): frame_size = obj.data.view_frame(scene", "Регистрируем формат. end_format = GeomVertexFormat.registerFormat(my_format) return end_format, color, texcoord def geom_create(obj): geom_vertex_format =", "добавляем к списку. dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad, name): if name in collision_node_dict: collision_node_dict[name].add_solid(quad)", "= triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0], v0[1], v0[2]),", "camera def build_hierarchy(obj, scene): # Узел для формирование иерархии root = NodePath(\"root\") #", "obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) # Если полигон столкновения содержит тела. if collision_node.getNumSolids()", "колонку для цвета c именем по умолчанию. geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color) # Так", "имя файла. 
path_save = os.path.join(path_project_save, obj.name) node = build_hierarchy(obj, context.scene) root = ModelRoot('{}.bam'.format(obj.name))", "= obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask", "\"ui.export_selected\" bl_label = \"Generator_selected\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем", "имя не совподает с активным. if not uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32,", "ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Объединяем путь директории и имя", "len(poly.vertices) == 3: for index in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj) # Если у", "triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0],", "необходимо проверить на компланарность. elif len(poly.vertices) == 4: if check_coplanar(obj, poly): coplanar.append(poly) else:", "слои. for uv in obj.data.uv_layers: # Если имя не совподает с активным. if", "in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) for collision_node in collision_node_dict.values(): from_mask", "root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) # Проходим по", "цвета. 
if obj.type == \"LIGHT\": create_object = \"LIGHT\" # Если объект является камерой.", "into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) #", "активным. if not col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если используются", "столкновения из компланарных прямольников. for name in named_coplanar: for poly in named_coplanar[name]: for", "GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata, 'vertex') normal_vertex = GeomVertexWriter(vdata, 'normal') #", "node_path.show() collision_node = CollisionNode(obj.name) # Создаем полигоны столкновения из треугольников. 
for poly in", "== \"MESH\": if obj.hatcher.type_mesh == \"Render\": create_object = geom_node_create if obj.hatcher.type_mesh == \"Collision\":", "= geom_vertex_format[1] texcoord = geom_vertex_format[2] vdata = GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position =", "CS_default, CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid from panda3d.core import GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData,", "if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1])", "for collision_node in collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'),", "completed, time: {}'.format(obj.name, datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname =", "из трех вершин, проверка на компланарность не нужна. if len(poly.vertices) == 3: for", "[] # Создаем полигоны столкновения из компланарных прямольников. 
for poly in coplanar: for", "poly in not_quad: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly): status =", "= CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad = [] # Нужно разбить некомпланарные", "корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по всем объектом в сцене.", "формат. my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем формат. end_format = GeomVertexFormat.registerFormat(my_format) return end_format,", "in collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node = CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] = collision_node def collision_polygon_create(obj,", "Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata, 'vertex') normal_vertex = GeomVertexWriter(vdata, 'normal') # Если используются", "5: not_quad.append(poly) for i in obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False for", "True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly): status = False # Если вершины три,", "нет материала, то рассортировываем по спискам else: # Если полигон из трех вершин,", "Создаем полигоны столкновения из компланарных прямольников. for name in named_coplanar: for poly in", "in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) # Создаем полигоны столкновения из", "не нужна. 
if len(poly.vertices) == 3: for index in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj)", "obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit, 2) camera.set_lens(lens) return camera def build_hierarchy(obj,", "for poly in obj.data.polygons: # Если список материалов не пуст. if len(obj.data.materials) >=", "на компланарность не нужна. if len(poly.vertices) == 3: for index in poly.vertices[2:]: add_polygons_to_dict(named_triangles,", "то создаем. checkcreate_dirs(path_project_save) # Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save,", "((v0[0], v0[1], v0[2]), (v1[0], v1[1], v1[2]), (v2[0], v2[1], v2[2])) return trangle def add_polygons_to_dict(dict_named,", "geom_vertex_format[1] texcoord = geom_vertex_format[2] vdata = GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata,", "if obj.data.vertex_colors.active: color = True # Создаем колонку для цвета c именем по", "collision_node.add_solid(quad) vertext_quad = [] # Нужно разбить некомпланарные полигоны, на треугольники. for poly", "triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0], v0[1], v0[2]), (v1[0],", "# Создаем полигоны столкновения из треугольников. for poly in triangles: for index in", "у полигона четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices) == 4: #", "GeomVertexWriter(vdata, 'normal') # Если используются цвета вершин. if color: color_vertex_list = {'color': GeomVertexWriter(vdata,", "{} # Проходим по треугольниуам. for triangle in mesh.loop_triangles: # Обработка первой вершины.", "создаем дополнительные колонки. 
for uv in obj.data.uv_layers: # Если имя не совподает с", "if obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0) def geom_node_create(obj, scene): geom = geom_create(obj) geom_node =", "triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name == 'texcoord':", "= True else: status = False return status def select_not_coplanar(obj): not_coplanar = []", "NodePath(\"root\") # Выполним рекурсию, для поиска всех. def recurse(obj, parent): # Переменая которая", "= GeomVertexArrayFormat() # Создаем колонку для вершин. geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3,", "= ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export object: {} completed, time: {}'.format(obj.name, datetime.now() -", "автоматически копланарен. if len(poly.vertices) == 3: status = True elif len(poly.vertices) >= 3:", "start_time = datetime.now() context.view_layer.update() # Объединяем путь проекта и относительную директорию сцены. path_project_save", "vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata, 'vertex') normal_vertex = GeomVertexWriter(vdata, 'normal') # Если используются цвета", "на треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) else: # Если полигон из трех", "import bmesh from mathutils.geometry import distance_point_to_plane ostream = Notify.out() list_object_support = {'MESH': False,", "vertext_quad = [] # Нужно разбить некомпланарные полигоны, на треугольники. 
for poly in", "group = NodePath(obj.name) collision_node_dict = {} vertext_quad = [] # Создаем полигоны столкновения", "= PerspectiveLens() if obj.data.type == 'ORTHO': lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1])", "ли данный тип объекта среди поддерживаемых. if obj.type in list_object_support: # Если есть", "result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) # Проходим по детям. for", "if len(poly.vertices) == 3: for index in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj) # Если", "необходимо разбить на треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) else: # Если полигон", "полигона четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices) == 4: if check_coplanar(obj,", "root = ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по всем объектом в сцене. for obj in", "из компланарных прямольников. for poly in coplanar: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad", "не пуст. if len(obj.data.materials) >= 1: # Если есть слот материала и он", "context.selected_objects: # Объединяем путь проекта и относительную директорию модели. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object)", "for i in obj.data.polygons: i.select = False for poly in not_quad: poly.select =", "obj) # Если у полигона более четырех вершин, необходимо разбить на треугольники. elif", "named_triangles: for poly in named_triangles[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1],", "некомпланарные полигоны, на треугольники. 
for poly in not_coplanar: for vertext in triangle_poly(poly, obj).values():", "list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0],", "else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name ==", "False texcoord = False # Создаем новый массив. geom_vertex_format = GeomVertexArrayFormat() # Создаем", "root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список выбранных объектов. for obj in context.selected_objects: #", "= {} # Проходим по треугольниуам. for triangle in mesh.loop_triangles: # Обработка первой", "= \"LIGHT\" # Если объект является камерой. if obj.type == \"CAMERA\": if obj.data.type", "in obj.data.polygons: if not check_coplanar(obj, poly): not_coplanar.append(poly) for i in obj.data.vertices: i.select=False for", "vertext_quad = [] # Создаем полигоны столкновения из компланарных прямольников. for name in", "return {'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\" bl_label = \"Generator_scene\" def execute(self, context):", "= GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata, 'vertex') normal_vertex = GeomVertexWriter(vdata, 'normal')", "in obj.data.uv_layers: # Если имя не совподает с активным. 
if not uv.name ==", "- start_time), \"Message\") return {'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\" bl_label = \"Generator_scene\"", "столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0) def geom_node_create(obj, scene): geom = geom_create(obj) geom_node", "bl_idname = \"ui.export_scene\" bl_label = \"Generator_scene\" def execute(self, context): start_time = datetime.now() context.view_layer.update()", "for index in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6: status = True", "Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Создаем корень для", "= False # Если вершины три, это значит полигон автоматически копланарен. if len(poly.vertices)", "datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\" bl_label =", "check_coplanar(obj, poly): status = False # Если вершины три, это значит полигон автоматически", "BamFile, BamWriter, Filename, Notify from panda3d.core import CollisionPolygon, CollisionNode import bpy import bmesh", "v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0], v0[1], v0[2]), (v1[0], v1[1],", "obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6: status = True else: status = False return status", "path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем существует ли директория, если нет то создаем.", "Создаем полигоны столкновения из многоугольников. 
for name in named_not_quad: # Нужно разбить многоугольники", "from panda3d.core import Point3, TransformState, LQuaternion from panda3d.core import Camera, PerspectiveLens, OrthographicLens, CS_default,", "[] # Создаем полигоны столкновения из некомпланарных прямольников. for name in named_not_coplanar: #", "OrthographicLens, CS_default, CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid from panda3d.core import GeomVertexArrayFormat, Geom, GeomVertexFormat,", "in not_quad: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly): status = False", "Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Если поле имени", "list_object_support = {'MESH': False, 'PERSP': False, 'ORTHO': False, 'CAMERA':True} def show_message_box(message = \"\",", "not col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color) # Проверка есть ли активные", "# Так же создаем дополнительные слои. for uv in obj.data.uv_layers: # Если имя", "in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj) # Если у полигона четыре вершины, необходимо проверить", "новый массив. geom_vertex_format = GeomVertexArrayFormat() # Создаем колонку для вершин. geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32,", "# Если нет материала, то рассортировываем по спискам else: # Если полигон из", "texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata, 'texcoord')} # Так же создаем дополнительные слои. for uv", "poly in named_not_quad[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name)", "Объединяем путь проекта и относительную директорию сцены. 
path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем", "triangles.append(poly) # Если у полигона четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices)", "list_object_support[obj.type]: if not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) root.add_child(node) else: node", "obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None # Обработка второй", "(v1[0], v1[1], v1[2]), (v2[0], v2[1], v2[2])) return trangle def add_polygons_to_dict(dict_named, poly, obj): #", "3: v1 = obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for index", "[] # Создаем полигоны столкновения из треугольников. for name in named_triangles: for poly", "triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0], v0[1], v0[2]), (v1[0], v1[1], v1[2]), (v2[0], v2[1], v2[2])) return", "- obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for index in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co,", "for triangle in mesh.loop_triangles: # Обработка первой вершины. if not triangle.loops[0] in list_vertext:", "Сюда записиваются индексы обработаных вершин. list_vertext = {} # Проходим по треугольниуам. for", "in named_not_coplanar: # Нужно разбить некомпланарные полигоны, на треугольники. for poly in named_not_coplanar[name]:", "поддерживаемых. 
if obj.type in list_object_support: # Если есть ли подтип. if list_object_support[obj.type]: if", "директории и имя файла. path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) # Если нет,", "color = geom_vertex_format[1] texcoord = geom_vertex_format[2] vdata = GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position", "'{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path = NodePath(collision_node)", "vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1],", "= None # Обработка второй вершины. if not triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1])", "создаем дополнительные колонки. 
for col in obj.data.vertex_colors: # Если имя не совподает с", "{} named_not_quad = {} triangles = [] coplanar = [] not_coplanar = []", "вершин, проверка на компланарность не нужна. if len(poly.vertices) == 3: for index in", "obj.data.vertices[poly.vertices[0]].co for index in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6: status =", "проверить на компланарность. elif len(poly.vertices) == 4: # Если полигон компланарный if check_coplanar(obj,", "not_quad: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask", "# Нужно разбить полигоны у которых более четырех сторон на треугольники. for poly", "директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем существует ли директория, если нет", "if obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system ==", "for index in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1 triangulator3.triangulate() for i in", "родитель. if not parent: npp = NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj))", "index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]), name) vertext_quad = []", "более четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) else:", "'': # Создаем корень для объединения. 
root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список выбранных", "= ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по всем объектом в сцене. for obj in context.scene.objects:", "recurse(child, obj) recurse(obj, obj.parent) return root.node().getChild(0) import os from datetime import datetime class", "add_polygons_to_dict(named_triangles, poly, obj) # Если у полигона четыре вершины, необходимо проверить на компланарность.", "if not context.scene.hatcher.file_name_selected == '': # Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected))", "# Так же создаем дополнительные колонки. for col in obj.data.vertex_colors: # Если имя", "GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем формат. end_format = GeomVertexFormat.registerFormat(my_format) return end_format, color, texcoord def", "# Так же создаем дополнительные колонки. for uv in obj.data.uv_layers: # Если имя", "{} completed, time: {}'.format(obj.name, datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname", "else: # Если нет родителя. np = NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show() # Проверяем", "объекта. create_object = None # Если объект является сеткой. if obj.type == \"MESH\":", "= [] # Создаем полигоны столкновения из компланарных прямольников. 
for poly in coplanar:", "return geom def select_not_quad(obj): not_quad = [] for poly in obj.data.polygons: if len(poly.vertices)", "import GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles from panda3d.core import GeomNode, PandaNode,", "2)) node_path = NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node = CollisionNode(obj.name) # Создаем", "texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if", "дополнительные слои. for uv in obj.data.uv_layers: # Если имя не совподает с активным.", "{'color': GeomVertexWriter(vdata, 'color')} # Так же создаем дополнительные слои. for col in obj.data.vertex_colors:", "'texcoord.{}'.format(uv.name)) # Запишем порядок треугольников. prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh = obj.data", "= True elif len(poly.vertices) >= 3: v1 = obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2 =", "= CollisionNode(obj.name) # Создаем полигоны столкновения из треугольников. for poly in triangles: for", "show_message_box(message = \"\", title = \"Message Box\", icon = 'INFO'): def draw(self, context):", "# Создаем колонку для вершин. geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal)", "obj in context.selected_objects: # Объединяем путь проекта и относительную директорию модели. 
path_project_save =", "# Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Если поле", "self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw, title = title, icon = icon) def checkcreate_dirs(path_project_save): #", "полигон компланарный if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly, obj) else: add_polygons_to_dict(named_not_coplanar, poly, obj) #", "GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node def camera_create(obj, scene): frame_size = obj.data.view_frame(scene = scene) if", "obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None # Обработка второй вершины.", "quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad = [] # Нужно разбить", "uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord) # Создаем формат. my_format = GeomVertexFormat()", "import BamFile, BamWriter, Filename, Notify from panda3d.core import CollisionPolygon, CollisionNode import bpy import", "== '': # Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список", "файл. if not context.scene.hatcher.file_name_selected == '': # Создаем корень для объединения. root =", "создаем. 
if not os.path.exists(path_project_save): try: os.makedirs(path_project_save) except OSError as error: #print(error) pass def", "for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad", "= None # Обработка третьей вершины. if not triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2])", "os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root) show_message_box('Export scene, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return", "None # Обработка третьей вершины. if not triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0],", "from panda3d.core import GeomNode, PandaNode, NodePath, ModelRoot from panda3d.core import BamFile, BamWriter, Filename,", "obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем порядок треугольников. 
prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed()", "len(poly.vertices) >= 3: v1 = obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co", "for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0],", "False for poly in not_quad: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly):", "# Объединяем путь проекта и относительную директорию модели. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) #", "= \"ui.export_object\" bl_label = \"Generator_object\" def execute(self, context): start_time = datetime.now() context.view_layer.update() #", "= datetime.now() context.view_layer.update() # Перебираем список выбранных объектов. for obj in context.selected_objects: #", "scene): frame_size = obj.data.view_frame(scene = scene) if obj.data.type == 'PERSP': lens = PerspectiveLens()", "формирование иерархии root = NodePath(\"root\") # Выполним рекурсию, для поиска всех. 
def recurse(obj,", "\"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name) camera.active = obj.hatcher.camera_active", "obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) # Если полигон столкновения содержит тела.", "if obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name) camera.active = obj.hatcher.camera_active bit =", "in obj.data.vertex_colors: # Если имя не совподает с активным. if not col.name ==", "GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем порядок треугольников. prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh =", "треугольников. for name in named_triangles: for poly in named_triangles[name]: for index in poly.vertices:", "же создаем дополнительные слои. for col in obj.data.vertex_colors: # Если имя не совподает", "относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем существует ли директория, если", "False # Создаем новый массив. geom_vertex_format = GeomVertexArrayFormat() # Создаем колонку для вершин.", "= build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и имя сцены. path_save =", "context.selected_objects: # Проверим есть ли данный тип объекта среди поддерживаемых. if obj.type in", "активные текстурные координаты у объекта. if obj.data.uv_layers.active: texcoord = True # Создаем колонку", "то раздельно. else: # Перебираем список выбранных объектов. for obj in context.selected_objects: #", "данный тип объекта среди поддерживаемых. 
if obj.type in list_object_support: # Если есть ли", "from panda3d.core import GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles from panda3d.core import", "ключ, добавляем к списку. dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad, name): if name in collision_node_dict:", "len(poly.vertices) == 3: status = True elif len(poly.vertices) >= 3: v1 = obj.data.vertices[poly.vertices[1]].co", "четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) ######################## ########################", "второй вершины. if not triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if", "name) for collision_node in collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'),", "context.view_layer.update() # Перебираем список выбранных объектов. for obj in context.selected_objects: # Объединяем путь", "def build_hierarchy(obj, scene): # Узел для формирование иерархии root = NodePath(\"root\") # Выполним", "quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) # Нужно разбить полигоны у которых более", "# Если имя не совподает с активным. 
if not uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name]", "obj.data.type == 'ORTHO': lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2]))", "name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for name in", "datetime import datetime class ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\" bl_label = \"Generator_object\" def execute(self,", "'color')} # Так же создаем дополнительные слои. for col in obj.data.vertex_colors: # Если", "полигон столкновения содержит тела. if collision_node.getNumSolids() >= 1: node_path = NodePath(collision_node) node_path.reparentTo(group) #", "Объединяем путь проекта и относительную директорию модели. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем", "GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles from panda3d.core import GeomNode, PandaNode, NodePath, ModelRoot from panda3d.core", "in obj.data.edges: i.select=False for i in obj.data.polygons: i.select = False for poly in", "obj.type == \"CAMERA\": if obj.data.type != 'PANO': create_object = camera_create # Если есть", "и имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) show_message_box('Export selected, completed, time:", "def draw(self, context): self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw, title = title, icon = icon)", "более четырех сторон на треугольники. 
for poly in not_quad: for vertext in triangle_poly(poly,", "= '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) # Если", "obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) # Объединяем путь директории и имя", "not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) root.add_child(node) else: node = build_hierarchy(obj,", "PerspectiveLens() if obj.data.type == 'ORTHO': lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1]) +", "time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\" bl_label", "return {'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\" bl_label = \"Generator_selected\" def execute(self, context):", "Обработка второй вершины. if not triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2])", "на треугольники. for poly in not_quad: for vertext in triangle_poly(poly, obj).values(): quad =", "obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0) def geom_node_create(obj, scene): geom = geom_create(obj) geom_node = GeomNode(obj.data.name)", "= ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список выбранных объектов. 
for obj in context.selected_objects: # Проверим", "obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None # Добавляем вершины в примитив. prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2])", "'PANO': node = build_hierarchy(obj, context.scene) root.add_child(node) else: node = build_hierarchy(obj, context.scene) root.add_child(node) #", "obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color) # Проверка есть ли активные текстурные координаты у", "color: color_vertex_list = {'color': GeomVertexWriter(vdata, 'color')} # Так же создаем дополнительные слои. for", "дополнительные колонки. for col in obj.data.vertex_colors: # Если имя не совподает с активным.", "obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None # Добавляем вершины в примитив. prim.addVertices(triangle.loops[0], triangle.loops[1],", "# Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Объединяем путь", "obj.children: recurse(child, obj) recurse(obj, obj.parent) return root.node().getChild(0) import os from datetime import datetime", "conversion_transform(obj): pos = Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos,", "многоугольников. for name in named_not_quad: # Нужно разбить многоугольники на треугольники. 
for poly", "Notify.out() list_object_support = {'MESH': False, 'PERSP': False, 'ORTHO': False, 'CAMERA':True} def show_message_box(message =", "import CollisionPolygon, CollisionNode import bpy import bmesh from mathutils.geometry import distance_point_to_plane ostream =", "for poly in obj.data.polygons: if len(poly.vertices) >= 5: not_quad.append(poly) for i in obj.data.vertices:", "= 'INFO'): def draw(self, context): self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw, title = title, icon", "node_path = NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node = CollisionNode(obj.name) # Создаем полигоны", "create_object = camera_create # Если есть родитель. if not parent: npp = NodePath(create_object(obj,", "\"Generator_scene\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем путь проекта и", "root.add_child(node) # Объединяем путь директории и имя сцены. path_save = os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save,", "in named_not_quad: # Нужно разбить многоугольники на треугольники. 
for poly in named_not_quad[name]: for", "+ abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default)", "in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else:", "file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer: BamWriter = file.getWriter() writer.writeObject(obj) writer.flush() file.close() def conversion_transform(obj): pos", "1: node_path = NodePath(collision_node) node_path.reparentTo(group) # Если стоит флажок показывать полигон столкновения. if", "color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None #", "col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если используются координаты текстур. 
if", "draw(self, context): self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw, title = title, icon = icon) def", "if texcoord: texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata, 'texcoord')} # Так же создаем дополнительные слои.", "# Проверяем существует ли директория, если нет то создаем. if not os.path.exists(path_project_save): try:", "bam_writer_file(path_save, node) show_message_box('Export selected, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class", "именем. if hasattr(obj.data.materials[poly.material_index], 'name'): # Если полигон из трех вершин, проверка на компланарность", "uv in obj.data.uv_layers: # Если имя не совподает с активным. if not uv.name", "i.select = False for poly in not_coplanar: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def", "'vertex') normal_vertex = GeomVertexWriter(vdata, 'normal') # Если используются цвета вершин. if color: color_vertex_list", "os.path.exists(path_project_save): try: os.makedirs(path_project_save) except OSError as error: #print(error) pass def bam_writer_file(path_save, obj): file", "i.select=False for i in obj.data.edges: i.select=False for i in obj.data.polygons: i.select = False", "obj.name) bam_writer_file(path_save, node) else: node = build_hierarchy(obj, context.scene) # Объединяем путь директории и", "geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord) # Так же создаем дополнительные колонки. for uv in", "elif len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad, poly, obj) # Если нет материала, то рассортировываем", "lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system", "на треугольники. 
for poly in not_coplanar: for vertext in triangle_poly(poly, obj).values(): quad =", "= [] # Перебираем полигоны объекта. for poly in obj.data.polygons: # Если список", "нужна. if len(poly.vertices) == 3: for index in poly.vertices[2:]: triangles.append(poly) # Если у", "# Если используются цвета вершин. if color: color_vertex_list = {'color': GeomVertexWriter(vdata, 'color')} #", "не нужна. if len(poly.vertices) == 3: for index in poly.vertices[2:]: triangles.append(poly) # Если", "Geom.C_color) # Проверка есть ли активные текстурные координаты у объекта. if obj.data.uv_layers.active: texcoord", "'color.{}'.format(col.name)) # Если используются координаты текстур. if texcoord: texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata, 'texcoord')}", "obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None # Обработка третьей", "scale = Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos, quat, scale) return transform def get_format(obj): color", "примитив. 
prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom = Geom(vdata) geom.addPrimitive(prim) return geom def select_not_quad(obj):", "= CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] = collision_node def collision_polygon_create(obj, scene): named_triangles = {} named_coplanar", "select_not_coplanar(context.object) return {'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\" bl_label = \"Checking_quad\" def execute(self,", "obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord:", "v2 = obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for index in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2)))", "= NodePath(obj.name) collision_node_dict = {} vertext_quad = [] # Создаем полигоны столкновения из", "\"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera", "if collision_node.getNumSolids() >= 1: node_path = NodePath(collision_node) node_path.reparentTo(group) # Если стоит флажок показывать", "bl_label = \"Generator_selected\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем путь", "# Если нет, то раздельно. else: # Перебираем список выбранных объектов. 
for obj", "CollisionNode import bpy import bmesh from mathutils.geometry import distance_point_to_plane ostream = Notify.out() list_object_support", "0 for index in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1 triangulator3.triangulate() for i", "у полигона четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices) == 4: if", "True # Создаем колонку для цвета c именем по умолчанию. geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8,", "# Если у полигона более четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices)", "путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name) node = build_hierarchy(obj, context.scene)", "материалов не пуст. if len(obj.data.materials) >= 1: # Если есть слот материала и", "сеткой. if obj.type == \"MESH\": if obj.hatcher.type_mesh == \"Render\": create_object = geom_node_create if", "for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3])", "build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и имя файла. 
path_save = os.path.join(path_project_save,", "obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path = NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node = CollisionNode(obj.name)", "4: if check_coplanar(obj, poly): coplanar.append(poly) else: not_coplanar.append(poly) # Если у полигона более четырех", "Объединяем путь проекта и относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем", "bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit,", "GeomVertexWriter(vdata, 'color')} # Так же создаем дополнительные слои. for col in obj.data.vertex_colors: #", "= Geom(vdata) geom.addPrimitive(prim) return geom def select_not_quad(obj): not_quad = [] for poly in", "# Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список выбранных объектов.", "ModelRoot from panda3d.core import BamFile, BamWriter, Filename, Notify from panda3d.core import CollisionPolygon, CollisionNode", "на треугольники. for poly in named_not_coplanar[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0],", "= os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save)", "в один файл. 
if not context.scene.hatcher.file_name_selected == '': # Создаем корень для объединения.", "CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\" def execute(self, context): select_not_coplanar(context.object) return {'FINISHED'}", "context.scene) # Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save,", "poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]), name) vertext_quad = [] # Создаем", "name) vertext_quad = [] # Создаем полигоны столкновения из некомпланарных прямольников. for name", "def bam_writer_file(path_save, obj): file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer: BamWriter = file.getWriter()", "name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for name in", "add_polygons_to_dict(named_not_coplanar, poly, obj) # Если у полигона более четырех вершин, необходимо разбить на", "= {} named_not_quad = {} triangles = [] coplanar = [] not_coplanar =", "collision_node.add_solid(quad) vertext_quad = [] # Создаем полигоны столкновения из компланарных прямольников. for poly", "name in collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node = CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] = collision_node def", "список. dict_named[obj.data.materials[poly.material_index].name] = [poly] else: # Если есть такой ключ, добавляем к списку.", "директории и имя сцены. 
path_save = os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root) show_message_box('Export scene, completed,", "# Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root)", "named_not_quad[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) for collision_node", "vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) for collision_node in collision_node_dict.values():", "+ '.bam')) writer: BamWriter = file.getWriter() writer.writeObject(obj) writer.flush() file.close() def conversion_transform(obj): pos =", "create_object = geom_node_create if obj.hatcher.type_mesh == \"Collision\": create_object = collision_polygon_create # Если объект", "class ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\" bl_label = \"Generator_object\" def execute(self, context): start_time =", "== 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for name in color_vertex_list:", "trangle = {} triangulator3 = Triangulator3() index_tr = 0 for index in poly.vertices:", "\"LIGHT\": create_object = \"LIGHT\" # Если объект является камерой. if obj.type == \"CAMERA\":", "return group.node().getChild(0) def geom_node_create(obj, scene): geom = geom_create(obj) geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom) return", "полигоны объекта. for poly in obj.data.polygons: # Если список материалов не пуст. 
if", ">= 1: node_path = NodePath(collision_node) node_path.reparentTo(group) # Если стоит флажок показывать полигон столкновения.", "2) camera.set_lens(lens) return camera def build_hierarchy(obj, scene): # Узел для формирование иерархии root", "name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]]", "obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) # Если полигон столкновения содержит тела. if collision_node.getNumSolids() >= 1:", "Notify from panda3d.core import CollisionPolygon, CollisionNode import bpy import bmesh from mathutils.geometry import", "ли активные текстурные координаты у объекта. if obj.data.uv_layers.active: texcoord = True # Создаем", "checkcreate_dirs(path_project_save) # Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name) node", "= NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node = CollisionNode(obj.name) # Создаем полигоны столкновения", "Пройдем по всем объектом в сцене. 
for obj in context.scene.objects: # Нас интересуют", "return geom_node def camera_create(obj, scene): frame_size = obj.data.view_frame(scene = scene) if obj.data.type ==", "geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node def camera_create(obj, scene): frame_size = obj.data.view_frame(scene =", "PandaNode, NodePath, ModelRoot from panda3d.core import BamFile, BamWriter, Filename, Notify from panda3d.core import", "not_quad = [] # Перебираем полигоны объекта. for poly in obj.data.polygons: # Если", "######################## ######################## group = NodePath(obj.name) collision_node_dict = {} vertext_quad = [] # Создаем", "v1[2]), (v2[0], v2[1], v2[2])) return trangle def add_polygons_to_dict(dict_named, poly, obj): # Если нет", "poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj) # Если у полигона четыре вершины, необходимо проверить на", "индексы обработаных вершин. list_vertext = {} # Проходим по треугольниуам. for triangle in", "содержит имя, рассортировываем их по словарям под этим именем. if hasattr(obj.data.materials[poly.material_index], 'name'): #", "#npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else: # Если нет родителя. np = NodePath(create_object(obj, scene))", "obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit, 2) camera.set_lens(lens) return camera", "Если список материалов не пуст. 
if len(obj.data.materials) >= 1: # Если есть слот", "= GeomVertexFormat.registerFormat(my_format) return end_format, color, texcoord def geom_create(obj): geom_vertex_format = get_format(obj) color =", "obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0])", "# Создаем полигоны столкновения из треугольников. for name in named_triangles: for poly in", "triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) # Нужно разбить полигоны у", "\"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name) camera.active = obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'),", "интересуют объекты только без родителя. if not obj.parent: # Проверим есть ли данный", "path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем существует ли директория, если нет то создаем.", "# Дабавляем ключ и список. dict_named[obj.data.materials[poly.material_index].name] = [poly] else: # Если есть такой", "scene): # Узел для формирование иерархии root = NodePath(\"root\") # Выполним рекурсию, для", "execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем путь проекта и относительную директорию", "in obj.data.polygons: if len(poly.vertices) >= 5: not_quad.append(poly) for i in obj.data.vertices: i.select=False for", "poly.vertices[2:]: triangles.append(poly) # Если у полигона четыре вершины, необходимо проверить на компланарность. elif", "треугольники. 
elif len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad, poly, obj) # Если нет материала, то", "имя не совподает с активным. if not uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata,", "vertext[2]), name) for collision_node in collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'),", "[] # Нужно разбить некомпланарные полигоны, на треугольники. for poly in not_coplanar: for", "# Если используются координаты текстур. if texcoord: texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata, 'texcoord')} #", "colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) for collision_node in collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'),", "node = build_hierarchy(obj, context.scene) root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export object: {}", "Если нет родителя. np = NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show() # Проверяем есть ли", "[] # Перебираем полигоны объекта. for poly in obj.data.polygons: # Если список материалов", "экспорта данного типа объекта. create_object = None # Если объект является сеткой. if", "3: status = True elif len(poly.vertices) >= 3: v1 = obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co", "для объединения. 
root = ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по всем объектом в сцене. for", "список выбранных объектов. for obj in context.selected_objects: # Проверим есть ли данный тип", "poly in not_coplanar: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj): trangle =", "Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по всем объектом в", "obj.data.type != 'PANO': create_object = camera_create # Если есть родитель. if not parent:", "используются координаты текстур. if texcoord: texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata, 'texcoord')} # Так же", "datetime class ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\" bl_label = \"Generator_object\" def execute(self, context): start_time", "in list_object_support: # Если есть ли подтип. if list_object_support[obj.type]: if not obj.data.type ==", "CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid from panda3d.core import GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3,", "if len(poly.vertices) >= 5: not_quad.append(poly) for i in obj.data.vertices: i.select=False for i in", "файла. path_save = os.path.join(path_project_save, obj.name) node = build_hierarchy(obj, context.scene) root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node)", "компланарность не нужна. if len(poly.vertices) == 3: for index in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly,", "Geom.C_texcoord) # Так же создаем дополнительные колонки. 
for uv in obj.data.uv_layers: # Если", "v2[1], v2[2])) return trangle def add_polygons_to_dict(dict_named, poly, obj): # Если нет такого ключа", "pos = Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos, quat,", "= Camera(obj.data.name) camera.active = obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'),", "значит полигон автоматически копланарен. if len(poly.vertices) == 3: status = True elif len(poly.vertices)", "{} named_coplanar = {} named_not_coplanar = {} named_not_quad = {} triangles = []", "Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos, quat, scale) return", "poly): add_polygons_to_dict(named_coplanar, poly, obj) else: add_polygons_to_dict(named_not_coplanar, poly, obj) # Если у полигона более", "status = False return status def select_not_coplanar(obj): not_coplanar = [] for poly in", "vertext[1], vertext[2]), name) for collision_node in collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'),", "Проверка есть ли активные текстурные координаты у объекта. 
if obj.data.uv_layers.active: texcoord = True", "bmesh from mathutils.geometry import distance_point_to_plane ostream = Notify.out() list_object_support = {'MESH': False, 'PERSP':", "camera.camera_mask = int(bit, 2) camera.set_lens(lens) return camera def build_hierarchy(obj, scene): # Узел для", "# Создаем полигоны столкновения из некомпланарных прямольников. for name in named_not_coplanar: # Нужно", "for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) # Создаем полигоны", "CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid from panda3d.core import GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter,", "obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None # Обработка второй вершины. if not triangle.loops[1] in list_vertext:", "с активным. if not col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color) # Проверка", "if obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system ==", "то объеденяем в один файл. if not context.scene.hatcher.file_name_selected == '': # Создаем корень", "= BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer: BamWriter = file.getWriter() writer.writeObject(obj) writer.flush() file.close() def", "obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1])", "объектов. 
for obj in context.selected_objects: # Проверим есть ли данный тип объекта среди", "list_vertext[triangle.loops[1]] = None # Обработка третьей вершины. if not triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2])", "name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]]", "path_save = os.path.join(path_project_save, obj.name) node = build_hierarchy(obj, context.scene) root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save,", "# Обработка третьей вершины. if not triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1],", "Если есть такой ключ, добавляем к списку. 
dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad, name): if", "{'MESH': False, 'PERSP': False, 'ORTHO': False, 'CAMERA':True} def show_message_box(message = \"\", title =", "'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None", "geom_create(obj): geom_vertex_format = get_format(obj) color = geom_vertex_format[1] texcoord = geom_vertex_format[2] vdata = GeomVertexData(obj.data.name,", "поле имени файла заполнено, то объеденяем в один файл. if not context.scene.hatcher.file_name_selected ==", "context.scene) root.add_child(node) # Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected)", "функцию необходимую для экспорта данного типа объекта. create_object = None # Если объект", "list_vertext[triangle.loops[0]] = None # Обработка второй вершины. if not triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1])", "collision_polygon_create # Если объект является источником цвета. if obj.type == \"LIGHT\": create_object =", "transform def get_format(obj): color = False texcoord = False # Создаем новый массив.", "есть ли активные текстурные координаты у объекта. 
if obj.data.uv_layers.active: texcoord = True #", "select_not_coplanar(obj): not_coplanar = [] for poly in obj.data.polygons: if not check_coplanar(obj, poly): not_coplanar.append(poly)", "для объединения. root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список выбранных объектов. for obj in", "корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список выбранных объектов. for obj", "else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name", "== 4: # Если полигон компланарный if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly, obj) else:", "color = False texcoord = False # Создаем новый массив. geom_vertex_format = GeomVertexArrayFormat()", "= [] not_coplanar = [] not_quad = [] # Перебираем полигоны объекта. for", "# Если нет такого ключа в словаре. if not obj.data.materials[poly.material_index].name in dict_named: #", "более четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad, poly,", "имя не совподает с активным. if not col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata,", "color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None # Обработка третьей вершины. 
if not", "== 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] =", "= obj.data.view_frame(scene = scene) if obj.data.type == 'PERSP': lens = PerspectiveLens() if obj.data.type", "if result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) # Проходим по детям.", "'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None", "not uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем порядок треугольников. prim", "# Перебираем список выбранных объектов. for obj in context.selected_objects: # Объединяем путь проекта", "# Выполним рекурсию, для поиска всех. 
def recurse(obj, parent): # Переменая которая содережит", "for index in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj) # Если у полигона четыре вершины,", "= Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos, quat, scale)", "triangulator3 = Triangulator3() index_tr = 0 for index in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr", "# Если объект является сеткой. if obj.type == \"MESH\": if obj.hatcher.type_mesh == \"Render\":", "color = True # Создаем колонку для цвета c именем по умолчанию. geom_vertex_format.add_column(\"color\",", "\"Message\") return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\" def execute(self,", "Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name) node = build_hierarchy(obj,", "директории и имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) else: node =", "not os.path.exists(path_project_save): try: os.makedirs(path_project_save) except OSError as error: #print(error) pass def bam_writer_file(path_save, obj):", "имени файла заполнено, то объеденяем в один файл. if not context.scene.hatcher.file_name_selected == '':", "есть ли подтип. 
if list_object_support[obj.type]: if not obj.data.type == 'PANO': node = build_hierarchy(obj,", "os from datetime import datetime class ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\" bl_label = \"Generator_object\"", "get_format(obj) color = geom_vertex_format[1] texcoord = geom_vertex_format[2] vdata = GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices))", "name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1])", "трех вершин, проверка на компланарность не нужна. if len(poly.vertices) == 3: for index", "вершины, необходимо проверить на компланарность. elif len(poly.vertices) == 4: # Если полигон компланарный", "= LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos, quat, scale) return transform def", "index in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6: status = True else:", "obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), 
obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8'))", "{'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\" bl_label = \"Checking_quad\" def execute(self, context): select_not_quad(context.object)", "obj.data.polygons: if not check_coplanar(obj, poly): not_coplanar.append(poly) for i in obj.data.vertices: i.select=False for i", "in named_not_quad[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) for", "prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom = Geom(vdata) geom.addPrimitive(prim) return geom def select_not_quad(obj): not_quad", "создаем. checkcreate_dirs(path_project_save) # Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name)", "context.scene.name) bam_writer_file(path_save, root) show_message_box('Export scene, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'}", "poly in named_coplanar[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]),", "error: #print(error) pass def bam_writer_file(path_save, obj): file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer:", "один файл. if not context.scene.hatcher.file_name_selected == '': # Создаем корень для объединения. 
root", "Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal) # Проверка есть ли цвета вершин у объекта.", "= build_hierarchy(obj, context.scene) root.add_child(node) else: node = build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь", "obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path = NodePath(collision_node) node_path.reparentTo(group)", "файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) show_message_box('Export selected, completed, time: {}'.format(datetime.now() -", "def geom_node_create(obj, scene): geom = geom_create(obj) geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node def", "== \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid)", "существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Если поле имени файла", "необходимо разбить на треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) ######################## ######################## group =", "есть ли данный тип объекта среди поддерживаемых. if obj.type in list_object_support: # Если", "Так же создаем дополнительные колонки. 
for uv in obj.data.uv_layers: # Если имя не", "< 1e-6: status = True else: status = False return status def select_not_coplanar(obj):", "if not check_coplanar(obj, poly): not_coplanar.append(poly) for i in obj.data.vertices: i.select=False for i in", "check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly, obj) else: add_polygons_to_dict(named_not_coplanar, poly, obj) # Если у полигона", "root.add_child(node) else: node = build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и имя", "poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6: status = True else: status =", "= {'color': GeomVertexWriter(vdata, 'color')} # Так же создаем дополнительные слои. for col in", "obj.data.edges: i.select=False for i in obj.data.polygons: i.select = False for poly in not_coplanar:", "= \"Checking_coplanarity\" def execute(self, context): select_not_coplanar(context.object) return {'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\"", "на треугольники. elif len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad, poly, obj) # Если нет материала,", "hasattr(obj.data.materials[poly.material_index], 'name'): # Если полигон из трех вершин, проверка на компланарность не нужна.", "obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'),", "context): start_time = datetime.now() context.view_layer.update() # Объединяем путь проекта и относительную директорию сцены.", "из треугольников. 
for poly in triangles: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad =", "else: # Если есть такой ключ, добавляем к списку. dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad,", "geom_node def camera_create(obj, scene): frame_size = obj.data.view_frame(scene = scene) if obj.data.type == 'PERSP':", "== \"LIGHT\": create_object = \"LIGHT\" # Если объект является камерой. if obj.type ==", "# Создаем формат. my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем формат. end_format = GeomVertexFormat.registerFormat(my_format)", "= obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for index in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) <", "NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show() # Проверяем есть ли такой объект в иерархии. result", "Если вершины три, это значит полигон автоматически копланарен. if len(poly.vertices) == 3: status", "for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0],", "# Так же создаем дополнительные слои. 
for col in obj.data.vertex_colors: # Если имя", "parent: npp = NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else: # Если", "class CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\" bl_label = \"Checking_quad\" def execute(self, context): select_not_quad(context.object) return", "in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if", "poly in named_not_coplanar[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name)", "список выбранных объектов. for obj in context.selected_objects: # Объединяем путь проекта и относительную", "obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name", "нет то создаем. checkcreate_dirs(path_project_save) # Объединяем путь директории и имя файла. 
path_save =", "name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]]", "texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color:", "= {} triangles = [] coplanar = [] not_coplanar = [] not_quad =", "node = build_hierarchy(obj, context.scene) # Объединяем путь директории и имя файла. path_save =", "вершин, необходимо разбить на треугольники. 
elif len(poly.vertices) >= 4: not_quad.append(poly) else: # Если", "= obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for index in poly.vertices[3:]:", "texcoord_vertex_list[name].set_row(triangle.loops[2]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for", "os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) #", "index in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj) # Если у полигона четыре вершины, необходимо", "# Объединяем путь проекта и относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) #", "return trangle def add_polygons_to_dict(dict_named, poly, obj): # Если нет такого ключа в словаре.", "Создаем формат. my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем формат. end_format = GeomVertexFormat.registerFormat(my_format) return", "= os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root) show_message_box('Export scene, completed, time: {}'.format(datetime.now() - start_time), \"Message\")", "директория, если нет то создаем. 
checkcreate_dirs(path_project_save) # Объединяем путь директории и имя файла.", "def show_message_box(message = \"\", title = \"Message Box\", icon = 'INFO'): def draw(self,", "index_tr = 0 for index in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1 triangulator3.triangulate()", "in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) # Нужно разбить полигоны", "obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'),", ">= 4: not_quad.append(poly) ######################## ######################## group = NodePath(obj.name) collision_node_dict = {} vertext_quad =", "= False for poly in not_quad: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj,", "= os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save)", "vdata = GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata, 'vertex') normal_vertex = GeomVertexWriter(vdata,", "triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1 triangulator3.triangulate() for i in range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1", "содережит функцию необходимую для экспорта данного типа объекта. 
create_object = None # Если", "объекта. if obj.data.uv_layers.active: texcoord = True # Создаем колонку для координат c именем", "not_coplanar.append(poly) # Если у полигона более четырех вершин, необходимо разбить на треугольники. elif", "if not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) # Объединяем путь директории", "in not_coplanar: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad)", "= build_hierarchy(obj, context.scene) # Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save,", "class ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\" bl_label = \"Generator_selected\" def execute(self, context): start_time =", "in named_not_coplanar[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) #", "4: add_polygons_to_dict(named_not_quad, poly, obj) # Если нет материала, то рассортировываем по спискам else:", "texcoord = True # Создаем колонку для координат c именем по умолчанию. geom_vertex_format.add_column(\"texcoord\",", "if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node = CollisionNode(obj.name) # Создаем полигоны столкновения из треугольников. for", "материала, то рассортировываем по спискам else: # Если полигон из трех вершин, проверка", "Выполним рекурсию, для поиска всех. def recurse(obj, parent): # Переменая которая содережит функцию", "имя не совподает с активным. 
if not col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8,", "if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3])", "poly): not_coplanar.append(poly) for i in obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False for", "dict_named: # Дабавляем ключ и список. dict_named[obj.data.materials[poly.material_index].name] = [poly] else: # Если есть", "from panda3d.core import BamFile, BamWriter, Filename, Notify from panda3d.core import CollisionPolygon, CollisionNode import", "Перебираем полигоны объекта. for poly in obj.data.polygons: # Если список материалов не пуст.", "объект в иерархии. result = root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root)", "normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2])", "компланарность. 
elif len(poly.vertices) == 4: if check_coplanar(obj, poly): coplanar.append(poly) else: not_coplanar.append(poly) # Если", "= GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node def camera_create(obj, scene): frame_size = obj.data.view_frame(scene = scene)", "collision_node.getNumSolids() >= 1: node_path = NodePath(collision_node) node_path.reparentTo(group) # Если стоит флажок показывать полигон", "выбранных объектов. for obj in context.selected_objects: # Объединяем путь проекта и относительную директорию", "from panda3d.core import CollisionPolygon, CollisionNode import bpy import bmesh from mathutils.geometry import distance_point_to_plane", "vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad = [] # Нужно", "import bpy import bmesh from mathutils.geometry import distance_point_to_plane ostream = Notify.out() list_object_support =", "obj in context.scene.objects: # Нас интересуют объекты только без родителя. if not obj.parent:", "'ORTHO': lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end)", "по умолчанию. geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord) # Так же создаем дополнительные колонки. for", "node_path.show() return group.node().getChild(0) def geom_node_create(obj, scene): geom = geom_create(obj) geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom)", "writer: BamWriter = file.getWriter() writer.writeObject(obj) writer.flush() file.close() def conversion_transform(obj): pos = Point3(*obj.matrix_world.translation) quat", "else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) # Проходим по детям. for child in obj.children: recurse(child,", "создаем. 
checkcreate_dirs(path_project_save) # Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по", "на компланарность. elif len(poly.vertices) == 4: # Если полигон компланарный if check_coplanar(obj, poly):", "for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask =", "geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color) # Так же создаем дополнительные колонки. for col in", "Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal) # Проверка есть ли цвета вершин у", "lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name) camera.active = obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'),", "2, Geom.NT_float32, Geom.C_texcoord) # Так же создаем дополнительные колонки. for uv in obj.data.uv_layers:", "= \"Generator_scene\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем путь проекта", "obj) # Если у полигона четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices)", "# Регистрируем формат. 
end_format = GeomVertexFormat.registerFormat(my_format) return end_format, color, texcoord def geom_create(obj): geom_vertex_format", "if len(poly.vertices) == 3: status = True elif len(poly.vertices) >= 3: v1 =", "{}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\" bl_label =", "CollisionPolygon, CollisionNode import bpy import bmesh from mathutils.geometry import distance_point_to_plane ostream = Notify.out()", "if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for name", "for poly in named_coplanar[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2],", "# Если стоит флажок показывать полигон столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0) def", "'PERSP': False, 'ORTHO': False, 'CAMERA':True} def show_message_box(message = \"\", title = \"Message Box\",", "под этим именем. if hasattr(obj.data.materials[poly.material_index], 'name'): # Если полигон из трех вершин, проверка", "for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0],", "полигона более четырех вершин, необходимо разбить на треугольники. 
elif len(poly.vertices) >= 4: not_quad.append(poly)", "есть ли цвета вершин у объекта. if obj.data.vertex_colors.active: color = True # Создаем", "scene) if obj.data.type == 'PERSP': lens = PerspectiveLens() if obj.data.type == 'ORTHO': lens", "in not_quad: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad)", "vertext[1], vertext[2]), name) # Создаем полигоны столкновения из многоугольников. for name in named_not_quad:", "# Обработка второй вершины. if not triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1],", "# Если имя не совподает с активным. if not col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name]", "координаты у объекта. if obj.data.uv_layers.active: texcoord = True # Создаем колонку для координат", "{} triangles = [] coplanar = [] not_coplanar = [] not_quad = []", "True # Создаем колонку для координат c именем по умолчанию. geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32,", "checkcreate_dirs(path_project_save) # Если поле имени файла заполнено, то объеденяем в один файл. if", "по спискам else: # Если полигон из трех вершин, проверка на компланарность не", "= [poly] else: # Если есть такой ключ, добавляем к списку. dict_named[obj.data.materials[poly.material_index].name].append(poly) def", "context): select_not_coplanar(context.object) return {'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\" bl_label = \"Checking_quad\" def", "= {} triangulator3 = Triangulator3() index_tr = 0 for index in poly.vertices: triangulator3.add_polygon_vertex(index_tr)", "in context.selected_objects: # Проверим есть ли данный тип объекта среди поддерживаемых. 
if obj.type", "import datetime class ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\" bl_label = \"Generator_object\" def execute(self, context):", "named_triangles[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name) vertext_quad =", "#print(error) pass def bam_writer_file(path_save, obj): file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer: BamWriter", "obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) # Создаем полигоны столкновения из многоугольников. for", "то рассортировываем по спискам else: # Если полигон из трех вершин, проверка на", "= triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0], v0[1], v0[2]), (v1[0], v1[1], v1[2]),", "нет родителя. np = NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show() # Проверяем есть ли такой", "== 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] =", "context.view_layer.update() # Объединяем путь проекта и относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene)", "Объединяем путь директории и имя файла. 
path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) show_message_box('Export", "4: # Если полигон компланарный if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly, obj) else: add_polygons_to_dict(named_not_coplanar,", "recurse(obj, obj.parent) return root.node().getChild(0) import os from datetime import datetime class ExportObject(bpy.types.Operator): bl_idname", "context.scene) root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export object: {} completed, time: {}'.format(obj.name,", "if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3])", "= CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad = [] # Создаем полигоны столкновения из", "типа объекта. create_object = None # Если объект является сеткой. if obj.type ==", "scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else: # Если нет родителя. 
np =", "obj.type == \"MESH\": if obj.hatcher.type_mesh == \"Render\": create_object = geom_node_create if obj.hatcher.type_mesh ==", "obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path = NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons:", "obj.data mesh.calc_loop_triangles() # Сюда записиваются индексы обработаных вершин. list_vertext = {} # Проходим", "colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name) vertext_quad = [] # Создаем полигоны столкновения из", "show_message_box('Export selected, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname", "name) vertext_quad = [] # Создаем полигоны столкновения из компланарных прямольников. for name", "named_not_coplanar[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) # Создаем", "ли такой объект в иерархии. result = root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj))", "#npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else: # Если нет родителя. np = NodePath(create_object(obj,", "GeomVertexWriter(vdata, 'texcoord')} # Так же создаем дополнительные слои. for uv in obj.data.uv_layers: #", "# Создаем полигоны столкновения из многоугольников. 
for name in named_not_quad: # Нужно разбить", "если нет то создаем. checkcreate_dirs(path_project_save) # Объединяем путь директории и имя файла. path_save", "obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) # Если полигон столкновения содержит тела. if collision_node.getNumSolids() >=", "\"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if", "named_not_coplanar: # Нужно разбить некомпланарные полигоны, на треугольники. for poly in named_not_coplanar[name]: for", "normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name ==", "= Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos, quat, scale) return transform def get_format(obj): color =", "у объекта. if obj.data.vertex_colors.active: color = True # Создаем колонку для цвета c", "вершин. geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal) # Проверка есть ли", "icon) def checkcreate_dirs(path_project_save): # Проверяем существует ли директория, если нет то создаем. if", "if obj.data.type == 'PERSP': lens = PerspectiveLens() if obj.data.type == 'ORTHO': lens =", "и относительную директорию сцены. 
path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем существует ли директория,", "= '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path =", "bam_writer_file(path_save, node) else: node = build_hierarchy(obj, context.scene) # Объединяем путь директории и имя", "vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) # Нужно разбить", ">= 4: not_quad.append(poly) else: # Если полигон из трех вершин, проверка на компланарность", "in obj.children: recurse(child, obj) recurse(obj, obj.parent) return root.node().getChild(0) import os from datetime import", "else: node = build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и имя файла.", "# Проходим по треугольниуам. for triangle in mesh.loop_triangles: # Обработка первой вершины. 
if", "= os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) show_message_box('Export selected, completed, time: {}'.format(datetime.now() - start_time), \"Message\")", "color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1],", "# Если вершины три, это значит полигон автоматически копланарен. if len(poly.vertices) == 3:", "четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices) == 4: # Если полигон", "list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0],", "- start_time), \"Message\") return {'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\" bl_label = \"Generator_selected\"", "используются цвета вершин. 
if color: color_vertex_list = {'color': GeomVertexWriter(vdata, 'color')} # Так же", "CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] = collision_node def collision_polygon_create(obj, scene): named_triangles = {} named_coplanar =", "color_vertex_list = {'color': GeomVertexWriter(vdata, 'color')} # Так же создаем дополнительные слои. for col", "obj.data.polygons: i.select = False for poly in not_coplanar: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\")", "третьей вершины. if not triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if", "= {'texcoord': GeomVertexWriter(vdata, 'texcoord')} # Так же создаем дополнительные слои. for uv in", "start_time), \"Message\") return {'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\" bl_label = \"Generator_selected\" def", "# Если есть такой ключ, добавляем к списку. dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad, name):", "необходимо проверить на компланарность. elif len(poly.vertices) == 4: # Если полигон компланарный if", "= \"\", title = \"Message Box\", icon = 'INFO'): def draw(self, context): self.layout.label(text", "у полигона более четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices) >= 4:", "сцене. for obj in context.scene.objects: # Нас интересуют объекты только без родителя. if", "path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем существует ли директория, если нет то создаем.", "= [] not_quad = [] # Перебираем полигоны объекта. 
for poly in obj.data.polygons:", "Filename, Notify from panda3d.core import CollisionPolygon, CollisionNode import bpy import bmesh from mathutils.geometry", "obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for", "geom = geom_create(obj) geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node def camera_create(obj, scene): frame_size", "coplanar: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad)", "obj.parent) return root.node().getChild(0) import os from datetime import datetime class ExportObject(bpy.types.Operator): bl_idname =", "bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj): trangle = {} triangulator3 = Triangulator3() index_tr = 0", "слот материала и он содержит имя, рассортировываем их по словарям под этим именем.", "check_coplanar(obj, poly): not_coplanar.append(poly) for i in obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False", "not_quad.append(poly) else: # Если полигон из трех вершин, проверка на компланарность не нужна.", "geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal) # Проверка есть ли цвета вершин у объекта. if", "на компланарность. elif len(poly.vertices) == 4: if check_coplanar(obj, poly): coplanar.append(poly) else: not_coplanar.append(poly) #", "CollisionNode(obj.name) # Создаем полигоны столкновения из треугольников. 
for poly in triangles: for index", "not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) # Объединяем путь директории и", ">= 5: not_quad.append(poly) for i in obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False", "- obj.data.vertices[poly.vertices[0]].co for index in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6: status", "такой ключ, добавляем к списку. dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad, name): if name in", "Если имя не совподает с активным. if not uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] =", "in named_triangles[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name) vertext_quad", "for name in named_not_quad: # Нужно разбить многоугольники на треугольники. for poly in", "TransformState.make_pos_quat_scale(pos, quat, scale) return transform def get_format(obj): color = False texcoord = False", "Geom.C_texcoord) # Создаем формат. my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем формат. end_format =", "if obj.data.type != 'PANO': create_object = camera_create # Если есть родитель. 
if not", "root.node().getChild(0) import os from datetime import datetime class ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\" bl_label", "import GeomNode, PandaNode, NodePath, ModelRoot from panda3d.core import BamFile, BamWriter, Filename, Notify from", "root) show_message_box('Export scene, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportSelected(bpy.types.Operator):", "obj.data.type == 'PERSP': lens = PerspectiveLens() if obj.data.type == 'ORTHO': lens = OrthographicLens()", "vertext_quad[2]) collision_node.add_solid(quad) vertext_quad = [] # Создаем полигоны столкновения из компланарных прямольников. for", "относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем существует ли директория, если", "conversion_transform(obj)) # Проходим по детям. for child in obj.children: recurse(child, obj) recurse(obj, obj.parent)", "geom def select_not_quad(obj): not_quad = [] for poly in obj.data.polygons: if len(poly.vertices) >=", "poly in not_coplanar: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2])", "BamWriter, Filename, Notify from panda3d.core import CollisionPolygon, CollisionNode import bpy import bmesh from", "collision_node.add_solid(quad) # Нужно разбить полигоны у которых более четырех сторон на треугольники. 
for", "in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if", "= title, icon = icon) def checkcreate_dirs(path_project_save): # Проверяем существует ли директория, если", "vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad = [] # Создаем полигоны", "LQuaternion from panda3d.core import Camera, PerspectiveLens, OrthographicLens, CS_default, CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid", "в примитив. prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom = Geom(vdata) geom.addPrimitive(prim) return geom def", "= True # Создаем колонку для координат c именем по умолчанию. geom_vertex_format.add_column(\"texcoord\", 2,", "CS_invalid from panda3d.core import GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles from panda3d.core", "Переменая которая содережит функцию необходимую для экспорта данного типа объекта. create_object = None", "build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и имя сцены. path_save = os.path.join(path_project_save,", "компланарных прямольников. for name in named_coplanar: for poly in named_coplanar[name]: for index in", "# Проходим по детям. 
for child in obj.children: recurse(child, obj) recurse(obj, obj.parent) return", "\"Generator_selected\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем путь проекта и", "obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for", "obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0],", "ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Если поле имени файла заполнено,", "'INFO'): def draw(self, context): self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw, title = title, icon =", "if not triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0],", "v1[1], v1[2]), (v2[0], v2[1], v2[2])) return trangle def add_polygons_to_dict(dict_named, poly, obj): # Если", "obj.data.view_frame(scene = scene) if obj.data.type == 'PERSP': lens = PerspectiveLens() if obj.data.type ==", "if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1],", 
"os.path.join(path_project_save, obj.name) node = build_hierarchy(obj, context.scene) root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export", "for poly in triangles: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1],", "== 'ORTHO': lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start,", "'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0])", "проекта и относительную директорию модели. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем существует ли", "# Если полигон из трех вершин, проверка на компланарность не нужна. if len(poly.vertices)", "obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) # Нужно разбить полигоны у которых", "столкновения из компланарных прямольников. for poly in coplanar: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co))", "путь директории и имя файла. path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) # Если", "нет такого ключа в словаре. if not obj.data.materials[poly.material_index].name in dict_named: # Дабавляем ключ", "объект является камерой. if obj.type == \"CAMERA\": if obj.data.type != 'PANO': create_object =", "вершин. 
list_vertext = {} # Проходим по треугольниуам. for triangle in mesh.loop_triangles: #", "GeomNode, PandaNode, NodePath, ModelRoot from panda3d.core import BamFile, BamWriter, Filename, Notify from panda3d.core", "datetime.now() context.view_layer.update() # Объединяем путь проекта и относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project,", "return status def select_not_coplanar(obj): not_coplanar = [] for poly in obj.data.polygons: if not", "as error: #print(error) pass def bam_writer_file(path_save, obj): file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam'))", "collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node = CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] = collision_node def collision_polygon_create(obj, scene):", "return end_format, color, texcoord def geom_create(obj): geom_vertex_format = get_format(obj) color = geom_vertex_format[1] texcoord", "triangles: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad", "def geom_create(obj): geom_vertex_format = get_format(obj) color = geom_vertex_format[1] texcoord = geom_vertex_format[2] vdata =", "активным. if not uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем порядок", "# Если есть ли подтип. if list_object_support[obj.type]: if not obj.data.type == 'PANO': node", "# Проверим есть ли данный тип объекта среди поддерживаемых. if obj.type in list_object_support:", "активным. 
if not col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color) # Проверка есть", "index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name) vertext_quad = [] #", "= None # Добавляем вершины в примитив. prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom =", "bl_idname = \"ui.export_object\" bl_label = \"Generator_object\" def execute(self, context): start_time = datetime.now() context.view_layer.update()", "цвета вершин. if color: color_vertex_list = {'color': GeomVertexWriter(vdata, 'color')} # Так же создаем", "\"ui.export_scene\" bl_label = \"Generator_scene\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем", "object: {} completed, time: {}'.format(obj.name, datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportScene(bpy.types.Operator):", "вершины в примитив. 
prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom = Geom(vdata) geom.addPrimitive(prim) return geom", "child in obj.children: recurse(child, obj) recurse(obj, obj.parent) return root.node().getChild(0) import os from datetime", "in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0],", "prim.setIndexType(Geom.NT_uint32) mesh = obj.data mesh.calc_loop_triangles() # Сюда записиваются индексы обработаных вершин. list_vertext =", "poly, obj) else: add_polygons_to_dict(named_not_coplanar, poly, obj) # Если у полигона более четырех вершин,", "этим именем. if hasattr(obj.data.materials[poly.material_index], 'name'): # Если полигон из трех вершин, проверка на", "poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name) vertext_quad = [] # Создаем полигоны", "pass def bam_writer_file(path_save, obj): file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer: BamWriter =", "poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad = [] #", "texcoord: texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata, 'texcoord')} # Так же создаем дополнительные слои. for", "создаем дополнительные слои. 
for uv in obj.data.uv_layers: # Если имя не совподает с", "texcoord = geom_vertex_format[2] vdata = GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata, 'vertex')", "ключа в словаре. if not obj.data.materials[poly.material_index].name in dict_named: # Дабавляем ключ и список.", "== 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for name in color_vertex_list:", "активным. if not uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord) # Создаем формат.", "np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) # Проходим по детям. for child in", "for poly in not_quad: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly): status", "[] not_quad = [] # Перебираем полигоны объекта. for poly in obj.data.polygons: #", "obj.hatcher.rel_path_object) # Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Объединяем", "проекта и относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем существует ли", "obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if", "= [] # Создаем полигоны столкновения из треугольников. 
for name in named_triangles: for", "Geom.NT_float32, Geom.C_normal) # Проверка есть ли цвета вершин у объекта. if obj.data.vertex_colors.active: color", "if not uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем порядок треугольников.", "camera_create(obj, scene): frame_size = obj.data.view_frame(scene = scene) if obj.data.type == 'PERSP': lens =", "vertext_quad[3]) collision_node.add_solid(quad) vertext_quad = [] # Нужно разбить некомпланарные полигоны, на треугольники. for", "# Добавляем вершины в примитив. prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom = Geom(vdata) geom.addPrimitive(prim)", "name in named_triangles: for poly in named_triangles[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict,", "build_hierarchy(obj, context.scene) root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export object: {} completed, time:", "in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6: status = True else: status", "mathutils.geometry import distance_point_to_plane ostream = Notify.out() list_object_support = {'MESH': False, 'PERSP': False, 'ORTHO':", "Создаем колонку для вершин. geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal) #", "полигоны столкновения из некомпланарных прямольников. 
for name in named_not_coplanar: # Нужно разбить некомпланарные", "texcoord_vertex_list[name].set_row(triangle.loops[0]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for", "= icon) def checkcreate_dirs(path_project_save): # Проверяем существует ли директория, если нет то создаем.", "bl_idname = \"ui.export_selected\" bl_label = \"Generator_selected\" def execute(self, context): start_time = datetime.now() context.view_layer.update()", "not triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1],", "def execute(self, context): select_not_coplanar(context.object) return {'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\" bl_label =", "poly in obj.data.polygons: # Если список материалов не пуст. if len(obj.data.materials) >= 1:", "Geom(vdata) geom.addPrimitive(prim) return geom def select_not_quad(obj): not_quad = [] for poly in obj.data.polygons:", "for col in obj.data.vertex_colors: # Если имя не совподает с активным. if not", "{'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\" bl_label = \"Generator_selected\" def execute(self, context): start_time", "# Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node)", "цвета c именем по умолчанию. 
geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color) # Так же создаем", "npp = NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else: # Если нет", "conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) # Проходим по детям. for child in obj.children:", "'.bam')) writer: BamWriter = file.getWriter() writer.writeObject(obj) writer.flush() file.close() def conversion_transform(obj): pos = Point3(*obj.matrix_world.translation)", "show_message_box('Export scene, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname", "имя сцены. path_save = os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root) show_message_box('Export scene, completed, time: {}'.format(datetime.now()", "status = True else: status = False return status def select_not_coplanar(obj): not_coplanar =", "!= 'PANO': create_object = camera_create # Если есть родитель. if not parent: npp", "for poly in coplanar: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1],", "# Если объект является камерой. if obj.type == \"CAMERA\": if obj.data.type != 'PANO':", "нет, то раздельно. else: # Перебираем список выбранных объектов. for obj in context.selected_objects:", "context.scene.hatcher.file_name_selected == '': # Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем", "dict_named[obj.data.materials[poly.material_index].name] = [poly] else: # Если есть такой ключ, добавляем к списку. 
dict_named[obj.data.materials[poly.material_index].name].append(poly)", "\"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if", "нужна. if len(poly.vertices) == 3: for index in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj) #", "for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad =", "Если полигон столкновения содержит тела. if collision_node.getNumSolids() >= 1: node_path = NodePath(collision_node) node_path.reparentTo(group)", "треугольники. for poly in not_quad: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0],", "col in obj.data.vertex_colors: # Если имя не совподает с активным. if not col.name", "у которых более четырех сторон на треугольники. for poly in not_quad: for vertext", "if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1],", "Triangulator3() index_tr = 0 for index in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1", "# Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Создаем корень", "дополнительные слои. 
for col in obj.data.vertex_colors: # Если имя не совподает с активным.", "obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system == \"CS_zup_left\":", "= [] coplanar = [] not_coplanar = [] not_quad = [] # Перебираем", "recurse(obj, parent): # Переменая которая содережит функцию необходимую для экспорта данного типа объекта.", "elif len(poly.vertices) >= 4: not_quad.append(poly) ######################## ######################## group = NodePath(obj.name) collision_node_dict = {}", "компланарность не нужна. if len(poly.vertices) == 3: for index in poly.vertices[2:]: triangles.append(poly) #", "status = True elif len(poly.vertices) >= 3: v1 = obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2", "= get_format(obj) color = geom_vertex_format[1] texcoord = geom_vertex_format[2] vdata = GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic)", "# Если полигон столкновения содержит тела. if collision_node.getNumSolids() >= 1: node_path = NodePath(collision_node)", "умолчанию. geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color) # Так же создаем дополнительные колонки. for col", "def execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем путь проекта и относительную", "1: # Если есть слот материала и он содержит имя, рассортировываем их по", "else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name", "столкновения из многоугольников. 
for name in named_not_quad: # Нужно разбить многоугольники на треугольники.", "if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3])", "obj.data.uv_layers.active: texcoord = True # Создаем колонку для координат c именем по умолчанию.", "obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'),", "с активным. if not uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем", "Запишем порядок треугольников. 
prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh = obj.data mesh.calc_loop_triangles() #", "obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit, 2) camera.set_lens(lens) return camera def build_hierarchy(obj, scene): #", "checkcreate_dirs(path_project_save): # Проверяем существует ли директория, если нет то создаем. if not os.path.exists(path_project_save):", "obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'),", "среди поддерживаемых. if obj.type in list_object_support: # Если есть ли подтип. if list_object_support[obj.type]:", "рекурсию, для поиска всех. def recurse(obj, parent): # Переменая которая содережит функцию необходимую", "else: # Перебираем список выбранных объектов. for obj in context.selected_objects: # Проверим есть", "obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path = NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node =", "тела. 
if collision_node.getNumSolids() >= 1: node_path = NodePath(collision_node) node_path.reparentTo(group) # Если стоит флажок", "i in obj.data.polygons: i.select = False for poly in not_coplanar: poly.select = True", "in mesh.loop_triangles: # Обработка первой вершины. if not triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0])", "node_path.reparentTo(group) # Если стоит флажок показывать полигон столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0)", "вершины. if not triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth:", "= [] # Нужно разбить некомпланарные полигоны, на треугольники. for poly in not_coplanar:", "Так же создаем дополнительные колонки. for col in obj.data.vertex_colors: # Если имя не", "их по словарям под этим именем. if hasattr(obj.data.materials[poly.material_index], 'name'): # Если полигон из", "Создаем колонку для координат c именем по умолчанию. geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord) #", "from panda3d.core import Camera, PerspectiveLens, OrthographicLens, CS_default, CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid from", "Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) #", "root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export object: {} completed, time: {}'.format(obj.name, datetime.now()", "= [] # Создаем полигоны столкновения из некомпланарных прямольников. for name in named_not_coplanar:", "нет то создаем. 
checkcreate_dirs(path_project_save) # Если поле имени файла заполнено, то объеденяем в", "root) show_message_box('Export object: {} completed, time: {}'.format(obj.name, datetime.now() - start_time), \"Message\") return {'FINISHED'}", "ли цвета вершин у объекта. if obj.data.vertex_colors.active: color = True # Создаем колонку", "obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'),", "not obj.parent: # Проверим есть ли данный тип объекта среди поддерживаемых. if obj.type", "if len(poly.vertices) == 3: for index in poly.vertices[2:]: triangles.append(poly) # Если у полигона", "# Объединяем путь проекта и относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) #", "obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name) camera.active = obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'),", "# Если имя не совподает с активным. 
if not uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name),", "if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name", "= Triangulator3() index_tr = 0 for index in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr +=", "директории и имя файла. path_save = os.path.join(path_project_save, obj.name) node = build_hierarchy(obj, context.scene) root", "текстур. if texcoord: texcoord_vertex_list = {'texcoord': GeomVertexWriter(vdata, 'texcoord')} # Так же создаем дополнительные", "3: for index in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj) # Если у полигона четыре", "elif len(poly.vertices) == 4: if check_coplanar(obj, poly): coplanar.append(poly) else: not_coplanar.append(poly) # Если у", "# Сюда записиваются индексы обработаных вершин. list_vertext = {} # Проходим по треугольниуам.", "triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in", "OSError as error: #print(error) pass def bam_writer_file(path_save, obj): file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save +", "poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad = [] # Создаем", "Если нет, то раздельно. else: # Перебираем список выбранных объектов. 
for obj in", "poly, obj) # Если у полигона четыре вершины, необходимо проверить на компланарность. elif", "len(poly.vertices) == 4: # Если полигон компланарный if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly, obj)", "for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) for collision_node in", "на треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) ######################## ######################## group = NodePath(obj.name) collision_node_dict", "texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name == 'color':", "# Создаем колонку для цвета c именем по умолчанию. geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color)", "заполнено, то объеденяем в один файл. if not context.scene.hatcher.file_name_selected == '': # Создаем", "полигоны столкновения из треугольников. 
for name in named_triangles: for poly in named_triangles[name]: for", "if obj.type == \"CAMERA\": if obj.data.type != 'PANO': create_object = camera_create # Если", "poly): coplanar.append(poly) else: not_coplanar.append(poly) # Если у полигона более четырех вершин, необходимо разбить", "for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) # Нужно", "collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask,", "\"Collision\": create_object = collision_polygon_create # Если объект является источником цвета. if obj.type ==", "= \"ui.export_selected\" bl_label = \"Generator_selected\" def execute(self, context): start_time = datetime.now() context.view_layer.update() #", "четырех сторон на треугольники. 
for poly in not_quad: for vertext in triangle_poly(poly, obj).values():", "True elif len(poly.vertices) >= 3: v1 = obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co", "== \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left)", "triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name == 'texcoord':", "camera_create # Если есть родитель. if not parent: npp = NodePath(create_object(obj, scene)) #npp.setName(obj.name)", "= True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly): status = False # Если вершины", "for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name) vertext_quad = []", "путь проекта и относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем существует", "named_not_coplanar = {} named_not_quad = {} triangles = [] coplanar = [] not_coplanar", "from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask", "координат c именем по умолчанию. 
geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord) # Так же создаем", "import distance_point_to_plane ostream = Notify.out() list_object_support = {'MESH': False, 'PERSP': False, 'ORTHO': False,", "obj): # Если нет такого ключа в словаре. if not obj.data.materials[poly.material_index].name in dict_named:", "4: not_quad.append(poly) else: # Если полигон из трех вершин, проверка на компланарность не", "== obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если используются координаты текстур. if texcoord:", "= {} vertext_quad = [] # Создаем полигоны столкновения из треугольников. for name", "# Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по всем объектом", "if name in collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node = CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] = collision_node", "obj) recurse(obj, obj.parent) return root.node().getChild(0) import os from datetime import datetime class ExportObject(bpy.types.Operator):", "obj.data.vertex_colors.active: color = True # Создаем колонку для цвета c именем по умолчанию.", "всех. 
def recurse(obj, parent): # Переменая которая содережит функцию необходимую для экспорта данного", "file.getWriter() writer.writeObject(obj) writer.flush() file.close() def conversion_transform(obj): pos = Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale", "if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for name", "icon = icon) def checkcreate_dirs(path_project_save): # Проверяем существует ли директория, если нет то", "geom_vertex_format[2] vdata = GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata, 'vertex') normal_vertex =", "ключ и список. dict_named[obj.data.materials[poly.material_index].name] = [poly] else: # Если есть такой ключ, добавляем", "not obj.data.materials[poly.material_index].name in dict_named: # Дабавляем ключ и список. dict_named[obj.data.materials[poly.material_index].name] = [poly] else:", "словаре. if not obj.data.materials[poly.material_index].name in dict_named: # Дабавляем ключ и список. dict_named[obj.data.materials[poly.material_index].name] =", "not_coplanar = [] for poly in obj.data.polygons: if not check_coplanar(obj, poly): not_coplanar.append(poly) for", "elif len(poly.vertices) == 4: # Если полигон компланарный if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly,", "NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else: # Если нет родителя. 
np", "completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\"", "camera.active = obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8'))", "Проверим есть ли данный тип объекта среди поддерживаемых. if obj.type in list_object_support: #", "obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for index in poly.vertices[3:]: if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co,", "'PANO': node = build_hierarchy(obj, context.scene) # Объединяем путь директории и имя файла. path_save", "для координат c именем по умолчанию. geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord) # Так же", "obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если используются координаты текстур. if texcoord: texcoord_vertex_list", "datetime.now() context.view_layer.update() # Перебираем список выбранных объектов. for obj in context.selected_objects: # Объединяем", "in context.scene.objects: # Нас интересуют объекты только без родителя. 
if not obj.parent: #", "lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system == \"CS_default\":", "= CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) # Нужно разбить полигоны у которых более четырех", "\"LIGHT\" # Если объект является камерой. if obj.type == \"CAMERA\": if obj.data.type !=", "Если есть родитель. if not parent: npp = NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root)", "create_object = None # Если объект является сеткой. if obj.type == \"MESH\": if", "vertext[1], vertext[2]) collision_node.add_solid(quad) # Нужно разбить полигоны у которых более четырех сторон на", "полигона четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices) == 4: # Если", "poly in coplanar: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2],", "quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'),", "root = NodePath(\"root\") # Выполним рекурсию, для поиска всех. def recurse(obj, parent): #", "выбранных объектов. 
for obj in context.selected_objects: # Проверим есть ли данный тип объекта", ">= 3: v1 = obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for", "os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) show_message_box('Export selected, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return", "create_object = collision_polygon_create # Если объект является источником цвета. if obj.type == \"LIGHT\":", "collision_node_dict[name].add_solid(quad) else: collision_node = CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] = collision_node def collision_polygon_create(obj, scene): named_triangles", "Point3, TransformState, LQuaternion from panda3d.core import Camera, PerspectiveLens, OrthographicLens, CS_default, CS_zup_right, CS_yup_right, CS_zup_left,", "полигоны, на треугольники. for poly in named_not_coplanar[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict,", "obj.type in list_object_support: # Если есть ли подтип. if list_object_support[obj.type]: if not obj.data.type", "= \"Message Box\", icon = 'INFO'): def draw(self, context): self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw,", "Нас интересуют объекты только без родителя. if not obj.parent: # Проверим есть ли", "path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) show_message_box('Export selected, completed, time: {}'.format(datetime.now() - start_time),", "for poly in named_not_coplanar[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]),", "obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node = CollisionNode(obj.name) # Создаем полигоны столкновения из треугольников. 
for poly", "create_object = \"LIGHT\" # Если объект является камерой. if obj.type == \"CAMERA\": if", "#np.show() # Проверяем есть ли такой объект в иерархии. result = root.find('**/{}'.format(parent.name)) if", "= [] for poly in obj.data.polygons: if len(poly.vertices) >= 5: not_quad.append(poly) for i", "color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None # Добавляем вершины в примитив. prim.addVertices(triangle.loops[0],", "CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) # Создаем полигоны столкновения из многоугольников. for name in", "len(poly.vertices) == 3: for index in poly.vertices[2:]: triangles.append(poly) # Если у полигона четыре", "def checkcreate_dirs(path_project_save): # Проверяем существует ли директория, если нет то создаем. if not", "return {'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\" bl_label = \"Checking_quad\" def execute(self, context):", "без родителя. if not obj.parent: # Проверим есть ли данный тип объекта среди", "range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0],", "collision_node def collision_polygon_create(obj, scene): named_triangles = {} named_coplanar = {} named_not_coplanar = {}", "объектом в сцене. 
for obj in context.scene.objects: # Нас интересуют объекты только без", "v1.cross(v2))) < 1e-6: status = True else: status = False return status def", "elif len(poly.vertices) >= 3: v1 = obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co -", "obj): file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer: BamWriter = file.getWriter() writer.writeObject(obj) writer.flush()", "texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color:", "obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'),", "list_object_support[obj.type]: if not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) # Объединяем путь", "#np.setName(obj.name) #np.show() # Проверяем есть ли такой объект в иерархии. result = root.find('**/{}'.format(parent.name))", "существует ли директория, если нет то создаем. 
checkcreate_dirs(path_project_save) # Объединяем путь директории и", "bl_idname = \"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\" def execute(self, context): select_not_coplanar(context.object) return {'FINISHED'} class", "директорию модели. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем существует ли директория, если нет", "if obj.data.type == 'ORTHO': lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1]))", "копланарен. if len(poly.vertices) == 3: status = True elif len(poly.vertices) >= 3: v1", "not_coplanar = [] not_quad = [] # Перебираем полигоны объекта. for poly in", "CS_yup_left, CS_invalid from panda3d.core import GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles from", "if not col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color) # Проверка есть ли", "not_coplanar: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) #", "None # Если объект является сеткой. if obj.type == \"MESH\": if obj.hatcher.type_mesh ==", "совподает с активным. 
if not col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name)) #", "= scene) if obj.data.type == 'PERSP': lens = PerspectiveLens() if obj.data.type == 'ORTHO':", "in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name) vertext_quad = [] # Создаем", "CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name) vertext_quad = [] # Создаем полигоны столкновения из компланарных", "\"Message\") return {'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\" bl_label = \"Generator_selected\" def execute(self,", "столкновения из треугольников. for name in named_triangles: for poly in named_triangles[name]: for index", "camera.set_lens(lens) return camera def build_hierarchy(obj, scene): # Узел для формирование иерархии root =", "более четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) ########################", "3, Geom.NT_float32, Geom.C_normal) # Проверка есть ли цвета вершин у объекта. if obj.data.vertex_colors.active:", "= None # Если объект является сеткой. 
if obj.type == \"MESH\": if obj.hatcher.type_mesh", "obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord:", "collision_node.add_solid(quad) collision_node_dict[name] = collision_node def collision_polygon_create(obj, scene): named_triangles = {} named_coplanar = {}", "компланарный if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly, obj) else: add_polygons_to_dict(named_not_coplanar, poly, obj) # Если", "детям. for child in obj.children: recurse(child, obj) recurse(obj, obj.parent) return root.node().getChild(0) import os", "index in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1 triangulator3.triangulate() for i in range(triangulator3.getNumTriangles()):", "формат. end_format = GeomVertexFormat.registerFormat(my_format) return end_format, color, texcoord def geom_create(obj): geom_vertex_format = get_format(obj)", "in not_coplanar: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj): trangle = {}", "else: add_polygons_to_dict(named_not_coplanar, poly, obj) # Если у полигона более четырех вершин, необходимо разбить", "# Если нет родителя. 
np = NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show() # Проверяем есть", "\"Render\": create_object = geom_node_create if obj.hatcher.type_mesh == \"Collision\": create_object = collision_polygon_create # Если", "len(poly.vertices) >= 4: not_quad.append(poly) ######################## ######################## group = NodePath(obj.name) collision_node_dict = {} vertext_quad", "4, Geom.NT_uint8, Geom.C_color) # Проверка есть ли активные текстурные координаты у объекта. if", "triangles = [] coplanar = [] not_coplanar = [] not_quad = [] #", "'PANO': create_object = camera_create # Если есть родитель. if not parent: npp =", ">= 4: add_polygons_to_dict(named_not_quad, poly, obj) # Если нет материала, то рассортировываем по спискам", "np = NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show() # Проверяем есть ли такой объект в", "{} vertext_quad = [] # Создаем полигоны столкновения из треугольников. for name in", "2)) # Если полигон столкновения содержит тела. if collision_node.getNumSolids() >= 1: node_path =", "подтип. if list_object_support[obj.type]: if not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) #", "poly in named_triangles[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]), name)", "сторон на треугольники. for poly in not_quad: for vertext in triangle_poly(poly, obj).values(): quad", "mesh = obj.data mesh.calc_loop_triangles() # Сюда записиваются индексы обработаных вершин. 
list_vertext = {}", "class ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\" bl_label = \"Generator_scene\" def execute(self, context): start_time =", "scene): named_triangles = {} named_coplanar = {} named_not_coplanar = {} named_not_quad = {}", "triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2])", "стоит флажок показывать полигон столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0) def geom_node_create(obj, scene):", "i in range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i]", "obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit, 2) camera.set_lens(lens) return camera def build_hierarchy(obj, scene): # Узел", "существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Создаем корень для объединения.", "######################## group = NodePath(obj.name) collision_node_dict = {} vertext_quad = [] # Создаем полигоны", "ли подтип. if list_object_support[obj.type]: if not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene)", "v0[2]), (v1[0], v1[1], v1[2]), (v2[0], v2[1], v2[2])) return trangle def add_polygons_to_dict(dict_named, poly, obj):", "объект является источником цвета. 
if obj.type == \"LIGHT\": create_object = \"LIGHT\" # Если", "for i in obj.data.edges: i.select=False for i in obj.data.polygons: i.select = False for", "else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name ==", "== 4: if check_coplanar(obj, poly): coplanar.append(poly) else: not_coplanar.append(poly) # Если у полигона более", "ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\" bl_label = \"Generator_scene\" def execute(self, context): start_time = datetime.now()", "scene)) #np.setName(obj.name) #np.show() # Проверяем есть ли такой объект в иерархии. result =", "имя файла. path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) # Если нет, то раздельно.", "четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices) == 4: if check_coplanar(obj, poly):", "пуст. 
if len(obj.data.materials) >= 1: # Если есть слот материала и он содержит", "not_quad: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly): status = False #", "= 0 for index in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co) index_tr += 1 triangulator3.triangulate() for", "for index in poly.vertices[2:]: triangles.append(poly) # Если у полигона четыре вершины, необходимо проверить", "texcoord_vertex_list[name].set_row(triangle.loops[1]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for", "normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name ==", "vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad = [] # Нужно разбить некомпланарные полигоны, на треугольники.", "{}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\" bl_label =", "для формирование иерархии root = NodePath(\"root\") # Выполним рекурсию, для поиска всех. def", "len(obj.data.materials) >= 1: # Если есть слот материала и он содержит имя, рассортировываем", "Если у полигона более четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices) >=", "GeomVertexWriter, Triangulator3, GeomTriangles from panda3d.core import GeomNode, PandaNode, NodePath, ModelRoot from panda3d.core import", "def get_format(obj): color = False texcoord = False # Создаем новый массив. 
geom_vertex_format", "= '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit, 2)", "root) # Если нет, то раздельно. else: # Перебираем список выбранных объектов. for", "from datetime import datetime class ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\" bl_label = \"Generator_object\" def", "столкновения из некомпланарных прямольников. for name in named_not_coplanar: # Нужно разбить некомпланарные полигоны,", "for poly in named_not_quad[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]),", "Нужно разбить некомпланарные полигоны, на треугольники. for poly in not_coplanar: for vertext in", "for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3])", "CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\" bl_label = \"Checking_quad\" def execute(self, context): select_not_quad(context.object) return {'FINISHED'}", "collision_node = CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name] = collision_node def collision_polygon_create(obj, scene): named_triangles = {}", "texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name == 'texcoord': 
texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else:", "distance_point_to_plane ostream = Notify.out() list_object_support = {'MESH': False, 'PERSP': False, 'ORTHO': False, 'CAMERA':True}", "name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else:", "BamWriter = file.getWriter() writer.writeObject(obj) writer.flush() file.close() def conversion_transform(obj): pos = Point3(*obj.matrix_world.translation) quat =", "Нужно разбить многоугольники на треугольники. for poly in named_not_quad[name]: for vertext in triangle_poly(poly,", "# Если имя не совподает с активным. if not col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name),", "select_not_quad(obj): not_quad = [] for poly in obj.data.polygons: if len(poly.vertices) >= 5: not_quad.append(poly)", "obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None # Добавляем", "\"MESH\": if obj.hatcher.type_mesh == \"Render\": create_object = geom_node_create if obj.hatcher.type_mesh == \"Collision\": create_object", "разбить на треугольники. 
elif len(poly.vertices) >= 4: not_quad.append(poly) ######################## ######################## group = NodePath(obj.name)", "четырех вершин, необходимо разбить на треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) else: #", "Geom.NT_float32, Geom.C_texcoord) # Создаем формат. my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем формат. end_format", "poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def check_coplanar(obj, poly): status = False # Если", "i.select = False for poly in not_quad: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def", "GeomTriangles from panda3d.core import GeomNode, PandaNode, NodePath, ModelRoot from panda3d.core import BamFile, BamWriter,", "transform = TransformState.make_pos_quat_scale(pos, quat, scale) return transform def get_format(obj): color = False texcoord", "по умолчанию. geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color) # Так же создаем дополнительные колонки. for", "== \"Collision\": create_object = collision_polygon_create # Если объект является источником цвета. if obj.type", "файла. 
path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) else: node = build_hierarchy(obj, context.scene) #", "= build_hierarchy(obj, context.scene) root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export object: {} completed,", "'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1])", "obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask =", "obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'),", "имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) show_message_box('Export selected, completed, time: {}'.format(datetime.now()", "= False return status def select_not_coplanar(obj): not_coplanar = [] for poly in obj.data.polygons:", "Создаем полигоны столкновения из треугольников. 
for poly in triangles: for index in poly.vertices:", "start_time = datetime.now() context.view_layer.update() # Перебираем список выбранных объектов. for obj in context.selected_objects:", "вершины три, это значит полигон автоматически копланарен. if len(poly.vertices) == 3: status =", "4: not_quad.append(poly) ######################## ######################## group = NodePath(obj.name) collision_node_dict = {} vertext_quad = []", "file.close() def conversion_transform(obj): pos = Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale()) transform", "# Если список материалов не пуст. if len(obj.data.materials) >= 1: # Если есть", "obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path = NodePath(collision_node) node_path.reparentTo(group) if", "group.node().getChild(0) def geom_node_create(obj, scene): geom = geom_create(obj) geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node", "lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system", "Создаем новый массив. geom_vertex_format = GeomVertexArrayFormat() # Создаем колонку для вершин. geom_vertex_format.add_column(\"vertex\", 3,", "на компланарность не нужна. 
if len(poly.vertices) == 3: for index in poly.vertices[2:]: triangles.append(poly)", "vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if", "bam_writer_file(path_save, root) # Если нет, то раздельно. else: # Перебираем список выбранных объектов.", "колонки. for col in obj.data.vertex_colors: # Если имя не совподает с активным. if", "collision_node in collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8'))", "not col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если используются координаты текстур.", "= root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) # Проходим", "bl_label = \"Generator_scene\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем путь", "texcoord = False # 
Создаем новый массив. geom_vertex_format = GeomVertexArrayFormat() # Создаем колонку", "not_quad.append(poly) ######################## ######################## group = NodePath(obj.name) collision_node_dict = {} vertext_quad = [] #", "icon = 'INFO'): def draw(self, context): self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw, title = title,", "= {} named_not_coplanar = {} named_not_quad = {} triangles = [] coplanar =", "in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else:", "2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2))", "'{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) 
collision_node.setIntoCollideMask(int(into_mask, 2)) # Если полигон", "obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8'))", "in obj.data.polygons: i.select = False for poly in not_quad: poly.select = True bpy.ops.object.mode_set(mode='EDIT')", "title = \"Message Box\", icon = 'INFO'): def draw(self, context): self.layout.label(text = message)", "Если полигон из трех вершин, проверка на компланарность не нужна. if len(poly.vertices) ==", "# Если у полигона четыре вершины, необходимо проверить на компланарность. elif len(poly.vertices) ==", "obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'),", "некомпланарные полигоны, на треугольники. for poly in named_not_coplanar[name]: for vertext in triangle_poly(poly, obj).values():", "named_not_quad: # Нужно разбить многоугольники на треугольники. 
for poly in named_not_quad[name]: for vertext", "triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2])", "for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3])", "poly in triangles: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2])", "и относительную директорию модели. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем существует ли директория,", "c именем по умолчанию. 
geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color) # Так же создаем дополнительные", "panda3d.core import Camera, PerspectiveLens, OrthographicLens, CS_default, CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid from panda3d.core", "obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False for i in obj.data.polygons: i.select =", "'{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'),", "geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata, 'vertex') normal_vertex = GeomVertexWriter(vdata, 'normal') # Если", "obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord) # Создаем формат. my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format) #", "color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если используются координаты текстур. 
if texcoord: texcoord_vertex_list =", "texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if", "not triangle.loops[1] in list_vertext: vertex_position.set_row(triangle.loops[1]) normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1],", "obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path = NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node", "== \"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right)", "dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad, name): if name in collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node =", "in range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 
= triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] =", "компланарность. elif len(poly.vertices) == 4: # Если полигон компланарный if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar,", "== \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left)", "message) bpy.context.window_manager.popup_menu(draw, title = title, icon = icon) def checkcreate_dirs(path_project_save): # Проверяем существует", "# Узел для формирование иерархии root = NodePath(\"root\") # Выполним рекурсию, для поиска", "= '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask =", "вершины, необходимо проверить на компланарность. elif len(poly.vertices) == 4: if check_coplanar(obj, poly): coplanar.append(poly)", "(v2[0], v2[1], v2[2])) return trangle def add_polygons_to_dict(dict_named, poly, obj): # Если нет такого", "в сцене. for obj in context.scene.objects: # Нас интересуют объекты только без родителя.", "# Проверка есть ли цвета вершин у объекта. if obj.data.vertex_colors.active: color = True", "# Нас интересуют объекты только без родителя. 
if not obj.parent: # Проверим есть", "= GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если используются координаты текстур. if texcoord: texcoord_vertex_list = {'texcoord':", "obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None # Добавляем вершины", "цвета вершин у объекта. if obj.data.vertex_colors.active: color = True # Создаем колонку для", "not context.scene.hatcher.file_name_selected == '': # Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) #", "Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Объединяем путь директории", "if list_object_support[obj.type]: if not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) # Объединяем", "= TransformState.make_pos_quat_scale(pos, quat, scale) return transform def get_format(obj): color = False texcoord =", "Проходим по треугольниуам. for triangle in mesh.loop_triangles: # Обработка первой вершины. if not", "Узел для формирование иерархии root = NodePath(\"root\") # Выполним рекурсию, для поиска всех.", "checkcreate_dirs(path_project_save) # Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем по всем", "полигон столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0) def geom_node_create(obj, scene): geom = geom_create(obj)", "vertext[2]) collision_node.add_solid(quad) # Нужно разбить полигоны у которых более четырех сторон на треугольники.", "путь проекта и относительную директорию модели. 
path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем существует", "obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name) camera.active", "которая содережит функцию необходимую для экспорта данного типа объекта. create_object = None #", "colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]), name) vertext_quad = [] # Создаем полигоны столкновения", "+ abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system ==", "triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) # Создаем полигоны столкновения из многоугольников.", "if hasattr(obj.data.materials[poly.material_index], 'name'): # Если полигон из трех вершин, проверка на компланарность не", "obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if", "index in poly.vertices[2:]: triangles.append(poly) # Если у полигона четыре вершины, необходимо проверить на", "GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles from panda3d.core import GeomNode, PandaNode, NodePath,", "== obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color) # Проверка есть ли активные текстурные координаты", "= GeomTriangles(Geom.UHStatic) prim.makeIndexed() 
prim.setIndexType(Geom.NT_uint32) mesh = obj.data mesh.calc_loop_triangles() # Сюда записиваются индексы обработаных", "color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[1]].color[0], obj.data.vertex_colors[name].data[triangle.loops[1]].color[1], obj.data.vertex_colors[name].data[triangle.loops[1]].color[2], obj.data.vertex_colors[name].data[triangle.loops[1]].color[3]) list_vertext[triangle.loops[1]] = None #", "{'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\" def execute(self, context): select_not_coplanar(context.object)", "vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1],", "context.scene.hatcher.rel_path_scene) # Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Создаем", "colnode_add_dict(collision_node_dict, quad, name): if name in collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node = CollisionNode(name) collision_node.add_solid(quad)", "coplanar = [] not_coplanar = [] not_quad = [] # Перебираем полигоны объекта.", "есть ли такой объект в иерархии. 
result = root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result) np.set_transform(root,", "умолчанию. geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord) # Так же создаем дополнительные колонки. for uv", "lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right)", "vertext_quad[2]), name) vertext_quad = [] # Создаем полигоны столкновения из компланарных прямольников. for", "директория, если нет то создаем. checkcreate_dirs(path_project_save) # Создаем корень для объединения. root =", "for i in range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i))", "if obj.hatcher.type_mesh == \"Render\": create_object = geom_node_create if obj.hatcher.type_mesh == \"Collision\": create_object =", "colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) # Создаем полигоны столкновения из многоугольников. for name", "bam_writer_file(path_save, obj): file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer: BamWriter = file.getWriter() writer.writeObject(obj)", "in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if", "для цвета c именем по умолчанию. 
geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color) # Так же", "poly): status = False # Если вершины три, это значит полигон автоматически копланарен.", "CS_zup_left, CS_yup_left, CS_invalid from panda3d.core import GeomVertexArrayFormat, Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles", "== \"Render\": create_object = geom_node_create if obj.hatcher.type_mesh == \"Collision\": create_object = collision_polygon_create #", "not parent: npp = NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else: #", "prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh = obj.data mesh.calc_loop_triangles() # Сюда записиваются индексы обработаных вершин. list_vertext", "if not obj.parent: # Проверим есть ли данный тип объекта среди поддерживаемых. if", "'normal') # Если используются цвета вершин. if color: color_vertex_list = {'color': GeomVertexWriter(vdata, 'color')}", "in obj.data.polygons: # Если список материалов не пуст. if len(obj.data.materials) >= 1: #", "for poly in not_coplanar: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1],", "директория, если нет то создаем. 
if not os.path.exists(path_project_save): try: os.makedirs(path_project_save) except OSError as", "poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj): trangle = {} triangulator3 =", "geom_vertex_format = get_format(obj) color = geom_vertex_format[1] texcoord = geom_vertex_format[2] vdata = GeomVertexData(obj.data.name, geom_vertex_format[0],", "obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord:", "for poly in obj.data.polygons: if not check_coplanar(obj, poly): not_coplanar.append(poly) for i in obj.data.vertices:", "for name in named_triangles: for poly in named_triangles[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co))", "vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if", "name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0], obj.data.vertex_colors.active.data[triangle.loops[1]].color[1], obj.data.vertex_colors.active.data[triangle.loops[1]].color[2], obj.data.vertex_colors.active.data[triangle.loops[1]].color[3]) else:", "OrthographicLens() 
lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system ==", "то создаем. if not os.path.exists(path_project_save): try: os.makedirs(path_project_save) except OSError as error: #print(error) pass", "obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit, 2) camera.set_lens(lens) return", "ExportSelected(bpy.types.Operator): bl_idname = \"ui.export_selected\" bl_label = \"Generator_selected\" def execute(self, context): start_time = datetime.now()", "столкновения содержит тела. if collision_node.getNumSolids() >= 1: node_path = NodePath(collision_node) node_path.reparentTo(group) # Если", "сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other) # Проверяем существует ли директория, если нет то", "вершин. if color: color_vertex_list = {'color': GeomVertexWriter(vdata, 'color')} # Так же создаем дополнительные", "in coplanar: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3])", "1e-6: status = True else: status = False return status def select_not_coplanar(obj): not_coplanar", "из компланарных прямольников. 
for name in named_coplanar: for poly in named_coplanar[name]: for index", "name in named_coplanar: for poly in named_coplanar[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict,", "= \"Generator_object\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Перебираем список выбранных", "build_hierarchy(obj, context.scene) root.add_child(node) else: node = build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории", "lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if", "obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit, 2) camera.set_lens(lens) return camera def build_hierarchy(obj, scene):", "in obj.data.polygons: i.select = False for poly in not_coplanar: poly.select = True bpy.ops.object.mode_set(mode='EDIT')", "== 3: for index in poly.vertices[2:]: add_polygons_to_dict(named_triangles, poly, obj) # Если у полигона", "данного типа объекта. create_object = None # Если объект является сеткой. if obj.type", "такой объект в иерархии. result = root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else:", "NodePath(obj.name) collision_node_dict = {} vertext_quad = [] # Создаем полигоны столкновения из треугольников.", "текстурные координаты у объекта. if obj.data.uv_layers.active: texcoord = True # Создаем колонку для", "и имя файла. 
path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) else: node = build_hierarchy(obj,", "node) else: node = build_hierarchy(obj, context.scene) # Объединяем путь директории и имя файла.", "collision_node.setIntoCollideMask(int(into_mask, 2)) node_path = NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node = CollisionNode(obj.name) #", "mesh.calc_loop_triangles() # Сюда записиваются индексы обработаных вершин. list_vertext = {} # Проходим по", "name) # Создаем полигоны столкновения из многоугольников. for name in named_not_quad: # Нужно", "if obj.type == \"LIGHT\": create_object = \"LIGHT\" # Если объект является камерой. if", "add_polygons_to_dict(named_not_quad, poly, obj) # Если нет материала, то рассортировываем по спискам else: #", "mesh.loop_triangles: # Обработка первой вершины. if not triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0],", "\"\", title = \"Message Box\", icon = 'INFO'): def draw(self, context): self.layout.label(text =", "и относительную директорию сцены. 
path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем существует ли директория,", "'{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'),", "= NodePath(collision_node) node_path.reparentTo(group) # Если стоит флажок показывать полигон столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show()", "Если объект является источником цвета. if obj.type == \"LIGHT\": create_object = \"LIGHT\" #", "[] for poly in obj.data.polygons: if len(poly.vertices) >= 5: not_quad.append(poly) for i in", "npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else: # Если нет родителя. 
np = NodePath(create_object(obj, scene)) #np.setName(obj.name)", "3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal) # Проверка есть ли цвета вершин", "ostream = Notify.out() list_object_support = {'MESH': False, 'PERSP': False, 'ORTHO': False, 'CAMERA':True} def", "writer.writeObject(obj) writer.flush() file.close() def conversion_transform(obj): pos = Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale =", "texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else:", "= int(bit, 2) camera.set_lens(lens) return camera def build_hierarchy(obj, scene): # Узел для формирование", "collision_node_dict[name] = collision_node def collision_polygon_create(obj, scene): named_triangles = {} named_coplanar = {} named_not_coplanar", "len(poly.vertices) == 4: if check_coplanar(obj, poly): coplanar.append(poly) else: not_coplanar.append(poly) # Если у полигона", "not check_coplanar(obj, poly): not_coplanar.append(poly) for i in obj.data.vertices: i.select=False for i in obj.data.edges:", "os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) else: node = build_hierarchy(obj, context.scene) # Объединяем путь директории", "Создаем колонку для цвета c именем по умолчанию. 
geom_vertex_format.add_column(\"color\", 4, Geom.NT_uint8, Geom.C_color) #", "color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None #", "vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad = [] # Нужно разбить некомпланарные полигоны, на", "существует ли директория, если нет то создаем. if not os.path.exists(path_project_save): try: os.makedirs(path_project_save) except", "же создаем дополнительные колонки. for uv in obj.data.uv_layers: # Если имя не совподает", "return camera def build_hierarchy(obj, scene): # Узел для формирование иерархии root = NodePath(\"root\")", "vertext[2]), name) # Создаем полигоны столкновения из многоугольников. 
for name in named_not_quad: #", "quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos, quat, scale) return transform", "color_vertex_list[name].set_row(triangle.loops[0]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2],", "'{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'), obj.hatcher.draw_mask_6.decode('utf-8'), obj.hatcher.draw_mask_7.decode('utf-8'), obj.hatcher.draw_mask_8.decode('utf-8')) camera.camera_mask = int(bit, 2) camera.set_lens(lens)", "объект является сеткой. if obj.type == \"MESH\": if obj.hatcher.type_mesh == \"Render\": create_object =", "obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if", "полигоны столкновения из компланарных прямольников. for poly in coplanar: for index in poly.vertices:", "if list_object_support[obj.type]: if not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) root.add_child(node) else:", "my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем формат. 
end_format = GeomVertexFormat.registerFormat(my_format) return end_format, color,", "LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale()) transform = TransformState.make_pos_quat_scale(pos, quat, scale) return transform def get_format(obj):", "имя, рассортировываем их по словарям под этим именем. if hasattr(obj.data.materials[poly.material_index], 'name'): # Если", "# Если есть родитель. if not parent: npp = NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show()", "if obj.data.uv_layers.active: texcoord = True # Создаем колонку для координат c именем по", "obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None # Обработка второй вершины. if not triangle.loops[1]", "'name'): # Если полигон из трех вершин, проверка на компланарность не нужна. if", "Объединяем путь директории и имя сцены. path_save = os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root) show_message_box('Export", "selected, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname =", "else: # Если полигон из трех вершин, проверка на компланарность не нужна. 
if", "end_format, color, texcoord def geom_create(obj): geom_vertex_format = get_format(obj) color = geom_vertex_format[1] texcoord =", "import Camera, PerspectiveLens, OrthographicLens, CS_default, CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left, CS_invalid from panda3d.core import", "normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list:", "status def select_not_coplanar(obj): not_coplanar = [] for poly in obj.data.polygons: if not check_coplanar(obj,", "if len(obj.data.materials) >= 1: # Если есть слот материала и он содержит имя,", "треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) ######################## ######################## group = NodePath(obj.name) collision_node_dict =", "= geom_vertex_format[2] vdata = GeomVertexData(obj.data.name, geom_vertex_format[0], Geom.UHStatic) vdata.set_num_rows(len(obj.data.vertices)) vertex_position = GeomVertexWriter(vdata, 'vertex') normal_vertex", "obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[1]].color[0],", "по словарям под этим именем. 
if hasattr(obj.data.materials[poly.material_index], 'name'): # Если полигон из трех", "from mathutils.geometry import distance_point_to_plane ostream = Notify.out() list_object_support = {'MESH': False, 'PERSP': False,", "if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1])", "in dict_named: # Дабавляем ключ и список. dict_named[obj.data.materials[poly.material_index].name] = [poly] else: # Если", "полигоны у которых более четырех сторон на треугольники. for poly in not_quad: for", "named_coplanar = {} named_not_coplanar = {} named_not_quad = {} triangles = [] coplanar", "# Создаем полигоны столкновения из компланарных прямольников. for poly in coplanar: for index", "vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'),", "in collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask,", "False, 'CAMERA':True} def show_message_box(message = \"\", title = \"Message Box\", icon = 'INFO'):", 
"vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if", "triangulator3.triangulate() for i in range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 =", "uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем порядок треугольников. prim =", "for i in obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False for i in", "CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) # Нужно разбить полигоны у которых более четырех сторон", "if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1],", "камерой. if obj.type == \"CAMERA\": if obj.data.type != 'PANO': create_object = camera_create #", "else: node = build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и имя сцены.", "Если имя не совподает с активным. if not col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name] =", "None # Добавляем вершины в примитив. prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom = Geom(vdata)", "колонку для вершин. 
geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal) # Проверка", "obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'),", "from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask", "объекта среди поддерживаемых. if obj.type in list_object_support: # Если есть ли подтип. 
if", "obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0],", "not uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord) # Создаем формат. my_format =", "obj.data.vertices[poly.vertices[1]].co - obj.data.vertices[poly.vertices[0]].co v2 = obj.data.vertices[poly.vertices[2]].co - obj.data.vertices[poly.vertices[0]].co for index in poly.vertices[3:]: if", "def recurse(obj, parent): # Переменая которая содережит функцию необходимую для экспорта данного типа", "= os.path.join(path_project_save, obj.name) node = build_hierarchy(obj, context.scene) root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root)", "\"Generator_object\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Перебираем список выбранных объектов.", "if not uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord) # Создаем формат. 
my_format", "normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list:", "\"Message\") return {'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\" bl_label = \"Generator_scene\" def execute(self,", "class CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\" def execute(self, context): select_not_coplanar(context.object) return", "for poly in not_coplanar: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj): trangle", "файла. path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) # Если нет, то раздельно. else:", "vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad = [] # Создаем полигоны столкновения из компланарных прямольников.", "np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) # Проходим по детям. for child", "= datetime.now() context.view_layer.update() # Объединяем путь проекта и относительную директорию сцены. path_project_save =", "тип объекта среди поддерживаемых. if obj.type in list_object_support: # Если есть ли подтип.", "geom_node.addGeom(geom) return geom_node def camera_create(obj, scene): frame_size = obj.data.view_frame(scene = scene) if obj.data.type", "если нет то создаем. checkcreate_dirs(path_project_save) # Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.name))", "по треугольниуам. for triangle in mesh.loop_triangles: # Обработка первой вершины. 
if not triangle.loops[0]", "obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) # Если полигон столкновения содержит тела. if", "collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2))", "v0[1], v0[2]), (v1[0], v1[1], v1[2]), (v2[0], v2[1], v2[2])) return trangle def add_polygons_to_dict(dict_named, poly,", "index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad =", "Geom.NT_float32, Geom.C_texcoord) # Так же создаем дополнительные колонки. for uv in obj.data.uv_layers: #", "color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None # Обработка второй вершины. 
if not", "len(poly.vertices) >= 5: not_quad.append(poly) for i in obj.data.vertices: i.select=False for i in obj.data.edges:", "named_coplanar: for poly in named_coplanar[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1],", "иерархии. result = root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root, conversion_transform(obj))", "camera = Camera(obj.data.name) camera.active = obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'), obj.hatcher.draw_mask_3.decode('utf-8'), obj.hatcher.draw_mask_4.decode('utf-8'), obj.hatcher.draw_mask_5.decode('utf-8'),", "trangle[i] = ((v0[0], v0[1], v0[2]), (v1[0], v1[1], v1[2]), (v2[0], v2[1], v2[2])) return trangle", "объекта. if obj.data.vertex_colors.active: color = True # Создаем колонку для цвета c именем", "= {} named_coplanar = {} named_not_coplanar = {} named_not_quad = {} triangles =", "Если нет материала, то рассортировываем по спискам else: # Если полигон из трех", "def colnode_add_dict(collision_node_dict, quad, name): if name in collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node = CollisionNode(name)", "# Проверяем есть ли такой объект в иерархии. 
result = root.find('**/{}'.format(parent.name)) if result:", "named_coplanar[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]), name) vertext_quad", "collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2))", "треугольников. prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh = obj.data mesh.calc_loop_triangles() # Сюда записиваются", "parent): # Переменая которая содережит функцию необходимую для экспорта данного типа объекта. create_object", "Если поле имени файла заполнено, то объеденяем в один файл. if not context.scene.hatcher.file_name_selected", "texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[1]) if name == 'color':", "# Переменая которая содережит функцию необходимую для экспорта данного типа объекта. create_object =", "CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad = [] # Нужно разбить некомпланарные полигоны,", "= True # Создаем колонку для цвета c именем по умолчанию. geom_vertex_format.add_column(\"color\", 4,", "проверка на компланарность не нужна. 
if len(poly.vertices) == 3: for index in poly.vertices[2:]:", "if obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name)", "== 'PANO': node = build_hierarchy(obj, context.scene) # Объединяем путь директории и имя файла.", "Если имя не совподает с активным. if not uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2,", "Перебираем список выбранных объектов. for obj in context.selected_objects: # Объединяем путь проекта и", "= build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и имя файла. path_save =", "panda3d.core import Point3, TransformState, LQuaternion from panda3d.core import Camera, PerspectiveLens, OrthographicLens, CS_default, CS_zup_right,", "= Notify.out() list_object_support = {'MESH': False, 'PERSP': False, 'ORTHO': False, 'CAMERA':True} def show_message_box(message", "bam_writer_file(path_save, root) show_message_box('Export scene, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class", "# Перебираем полигоны объекта. for poly in obj.data.polygons: # Если список материалов не", "obj.parent: # Проверим есть ли данный тип объекта среди поддерживаемых. 
if obj.type in", "in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[2]].color[0], obj.data.vertex_colors.active.data[triangle.loops[2]].color[1], obj.data.vertex_colors.active.data[triangle.loops[2]].color[2], obj.data.vertex_colors.active.data[triangle.loops[2]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0],", "abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system", "list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0],", "есть такой ключ, добавляем к списку. 
dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad, name): if name", "else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name ==", "if not triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0],", "scene, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class ExportSelected(bpy.types.Operator): bl_idname =", "vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]), name) vertext_quad = [] # Создаем полигоны", "bl_label = \"Generator_object\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Перебираем список", "quat, scale) return transform def get_format(obj): color = False texcoord = False #", "Так же создаем дополнительные слои. 
for uv in obj.data.uv_layers: # Если имя не", "\"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if", "== \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name) camera.active =", "name in named_not_quad: # Нужно разбить многоугольники на треугольники. for poly in named_not_quad[name]:", "def conversion_transform(obj): pos = Point3(*obj.matrix_world.translation) quat = LQuaternion(*obj.matrix_world.to_quaternion()) scale = Point3(*obj.matrix_world.to_scale()) transform =", "# Проверка есть ли активные текстурные координаты у объекта. if obj.data.uv_layers.active: texcoord =", "путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) else: node", "def execute(self, context): start_time = datetime.now() context.view_layer.update() # Перебираем список выбранных объектов. for", "context.view_layer.update() # Объединяем путь проекта и относительную директорию сцены. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_other)", "совподает с активным. 
if not col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color) #", "\"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\" def execute(self, context): select_not_coplanar(context.object) return {'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname", "obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) root.add_child(node) else: node = build_hierarchy(obj, context.scene)", "'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None", "из треугольников. for name in named_triangles: for poly in named_triangles[name]: for index in", "# Если поле имени файла заполнено, то объеденяем в один файл. if not", "[] for poly in obj.data.polygons: if not check_coplanar(obj, poly): not_coplanar.append(poly) for i in", "def collision_polygon_create(obj, scene): named_triangles = {} named_coplanar = {} named_not_coplanar = {} named_not_quad", "и он содержит имя, рассортировываем их по словарям под этим именем. 
if hasattr(obj.data.materials[poly.material_index],", "in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]) collision_node.add_solid(quad) vertext_quad = []", "start_time), \"Message\") return {'FINISHED'} class ExportScene(bpy.types.Operator): bl_idname = \"ui.export_scene\" bl_label = \"Generator_scene\" def", "= obj.data mesh.calc_loop_triangles() # Сюда записиваются индексы обработаных вершин. list_vertext = {} #", "poly, obj): # Если нет такого ключа в словаре. if not obj.data.materials[poly.material_index].name in", "normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name ==", "texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else:", "list_vertext[triangle.loops[2]] = None # Добавляем вершины в примитив. prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive() geom", "v2[2])) return trangle def add_polygons_to_dict(dict_named, poly, obj): # Если нет такого ключа в", "name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else:", "node = build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и имя сцены. path_save", "сцены. 
path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем существует ли директория, если нет то", "# Если полигон компланарный if check_coplanar(obj, poly): add_polygons_to_dict(named_coplanar, poly, obj) else: add_polygons_to_dict(named_not_coplanar, poly,", "os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) # Если нет, то раздельно. else: # Перебираем список", "CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad = [] # Создаем полигоны столкновения из компланарных", "triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0], v0[1], v0[2]), (v1[0], v1[1], v1[2]), (v2[0],", "context.scene) root.add_child(node) else: node = build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем путь директории и", "треугольниуам. for triangle in mesh.loop_triangles: # Обработка первой вершины. 
if not triangle.loops[0] in", "normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2])", "= os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) else: node = build_hierarchy(obj, context.scene) # Объединяем путь", "vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) # Создаем полигоны столкновения", "if not triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0],", "context.scene) root.add_child(node) # Объединяем путь директории и имя сцены. path_save = os.path.join(path_project_save, context.scene.name)", "obj.data.uv_layers.active.data[triangle.loops[2]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name", "True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj): trangle = {} triangulator3 = Triangulator3() index_tr", "context.scene.objects: # Нас интересуют объекты только без родителя. 
if not obj.parent: # Проверим", "вершин, необходимо разбить на треугольники. elif len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad, poly, obj) #", "triangle.loops[2]) prim.closePrimitive() geom = Geom(vdata) geom.addPrimitive(prim) return geom def select_not_quad(obj): not_quad = []", "i in obj.data.vertices: i.select=False for i in obj.data.edges: i.select=False for i in obj.data.polygons:", "obj.data.polygons: # Если список материалов не пуст. if len(obj.data.materials) >= 1: # Если", "треугольники. for poly in named_not_quad[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1],", "lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system == \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera =", "node_path = NodePath(collision_node) node_path.reparentTo(group) # Если стоит флажок показывать полигон столкновения. if obj.hatcher.visibility_collision_polygons:", "collision_node = CollisionNode(obj.name) # Создаем полигоны столкновения из треугольников. 
for poly in triangles:", "import Point3, TransformState, LQuaternion from panda3d.core import Camera, PerspectiveLens, OrthographicLens, CS_default, CS_zup_right, CS_yup_right,", "= \"ui.export_scene\" bl_label = \"Generator_scene\" def execute(self, context): start_time = datetime.now() context.view_layer.update() #", "color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1],", "'PERSP': lens = PerspectiveLens() if obj.data.type == 'ORTHO': lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) +", "словарям под этим именем. if hasattr(obj.data.materials[poly.material_index], 'name'): # Если полигон из трех вершин,", "show_message_box('Export object: {} completed, time: {}'.format(obj.name, datetime.now() - start_time), \"Message\") return {'FINISHED'} class", "объекта. for poly in obj.data.polygons: # Если список материалов не пуст. if len(obj.data.materials)", "не совподает с активным. if not uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord)", "полигоны, на треугольники. for poly in not_coplanar: for vertext in triangle_poly(poly, obj).values(): quad", "колонку для координат c именем по умолчанию. geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord) # Так", "совподает с активным. if not uv.name == obj.data.uv_layers.active.name: texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) #", "родителя. 
np = NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show() # Проверяем есть ли такой объект", "на треугольники. for poly in named_not_quad[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0],", "Geom.C_color) # Так же создаем дополнительные колонки. for col in obj.data.vertex_colors: # Если", "collision_node_dict = {} vertext_quad = [] # Создаем полигоны столкновения из треугольников. for", "named_not_quad = {} triangles = [] coplanar = [] not_coplanar = [] not_quad", "quad = CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2]) collision_node.add_solid(quad) vertext_quad = [] # Создаем полигоны столкновения", "in named_coplanar[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]), name)", "'CAMERA':True} def show_message_box(message = \"\", title = \"Message Box\", icon = 'INFO'): def", "в иерархии. result = root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result) np.set_transform(root, conversion_transform(obj)) else: np.reparentTo(root) np.set_transform(root,", "Так же создаем дополнительные слои. for col in obj.data.vertex_colors: # Если имя не", "triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[2]].normal[0], obj.data.vertices[triangle.vertices[2]].normal[1], obj.data.vertices[triangle.vertices[2]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in", "то создаем. checkcreate_dirs(path_project_save) # Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.name)) # Пройдем", "GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles from panda3d.core import GeomNode, PandaNode, NodePath, ModelRoot from", "список материалов не пуст. 
if len(obj.data.materials) >= 1: # Если есть слот материала", "разбить на треугольники. elif len(poly.vertices) >= 4: not_quad.append(poly) else: # Если полигон из", "NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show() collision_node = CollisionNode(obj.name) # Создаем полигоны столкновения из", "in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]), name) vertext_quad = [] #", "= '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask =", "title, icon = icon) def checkcreate_dirs(path_project_save): # Проверяем существует ли директория, если нет", "collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'), obj.hatcher.into_mask_3.decode('utf-8'), obj.hatcher.into_mask_4.decode('utf-8'), obj.hatcher.into_mask_5.decode('utf-8'), obj.hatcher.into_mask_6.decode('utf-8'), obj.hatcher.into_mask_7.decode('utf-8'), obj.hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask,", "Проверяем есть ли такой объект в иерархии. result = root.find('**/{}'.format(parent.name)) if result: np.reparentTo(result)", "полигоны столкновения из компланарных прямольников. for name in named_coplanar: for poly in named_coplanar[name]:", "normal_vertex = GeomVertexWriter(vdata, 'normal') # Если используются цвета вершин. 
if color: color_vertex_list =", "разбить многоугольники на треугольники. for poly in named_not_quad[name]: for vertext in triangle_poly(poly, obj).values():", "# Нужно разбить некомпланарные полигоны, на треугольники. for poly in not_coplanar: for vertext", "lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system == \"CS_yup_right\": lens.set_coordinate_system(CS_yup_right) if obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system", "# Обработка первой вершины. if not triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1],", "obj.name) node = build_hierarchy(obj, context.scene) root = ModelRoot('{}.bam'.format(obj.name)) root.add_child(node) bam_writer_file(path_save, root) show_message_box('Export object:", "else: not_coplanar.append(poly) # Если у полигона более четырех вершин, необходимо разбить на треугольники.", "записиваются индексы обработаных вершин. list_vertext = {} # Проходим по треугольниуам. for triangle", "triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) for collision_node in collision_node_dict.values(): from_mask =", "scene): geom = geom_create(obj) geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node def camera_create(obj, scene):", "abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6: status = True else: status = False return", "for obj in context.selected_objects: # Проверим есть ли данный тип объекта среди поддерживаемых.", "треугольников. 
for poly in triangles: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) quad = CollisionPolygon(vertext_quad[0],", "= NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show() # Проверяем есть ли такой объект в иерархии.", "if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[2]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[2]].uv[0], obj.data.uv_layers.active.data[triangle.loops[2]].uv[1])", "True else: status = False return status def select_not_coplanar(obj): not_coplanar = [] for", "CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]), name) vertext_quad = [] # Создаем полигоны столкновения из", "else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0], obj.data.vertex_colors[name].data[triangle.loops[0]].color[1], obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None # Обработка второй вершины. if", "= geom_create(obj) geom_node = GeomNode(obj.data.name) geom_node.addGeom(geom) return geom_node def camera_create(obj, scene): frame_size =", "= False for poly in not_coplanar: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly,", "является источником цвета. if obj.type == \"LIGHT\": create_object = \"LIGHT\" # Если объект", "первой вершины. if not triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if", "ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список выбранных объектов. 
for obj in context.selected_objects: # Проверим есть", "с активным. if not uv.name == obj.data.uv_layers.active.name: geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord) # Создаем", "Создаем корень для объединения. root = ModelRoot('{}.bam'.format(context.scene.hatcher.file_name_selected)) # Перебираем список выбранных объектов. for", "lens = OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if", "if obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system ==", "ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\" bl_label = \"Generator_object\" def execute(self, context): start_time = datetime.now()", "BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer: BamWriter = file.getWriter() writer.writeObject(obj) writer.flush() file.close() def conversion_transform(obj):", "Обработка первой вершины. if not triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2])", "из многоугольников. for name in named_not_quad: # Нужно разбить многоугольники на треугольники. 
for", "= True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj): trangle = {} triangulator3 = Triangulator3()", "for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0], vertext_quad[1], vertext_quad[2], vertext_quad[3]), name) vertext_quad =", "из некомпланарных прямольников. for name in named_not_coplanar: # Нужно разбить некомпланарные полигоны, на", "name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[1]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1])", "Обработка третьей вершины. if not triangle.loops[2] in list_vertext: vertex_position.set_row(triangle.loops[2]) normal_vertex.set_row(triangle.loops[2]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[2]].co[0], obj.data.vertices[triangle.vertices[2]].co[1], obj.data.vertices[triangle.vertices[2]].co[2])", "vertext_quad = [] # Создаем полигоны столкновения из некомпланарных прямольников. for name in", "geom_node_create if obj.hatcher.type_mesh == \"Collision\": create_object = collision_polygon_create # Если объект является источником", "{} triangulator3 = Triangulator3() index_tr = 0 for index in poly.vertices: triangulator3.add_polygon_vertex(index_tr) triangulator3.add_vertex(*obj.data.vertices[index].co)", "Если нет такого ключа в словаре. 
if not obj.data.materials[poly.material_index].name in dict_named: # Дабавляем", "if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name", "Объединяем путь директории и имя файла. path_save = os.path.join(path_project_save, obj.name) bam_writer_file(path_save, node) else:", "poly, obj) # Если нет материала, то рассортировываем по спискам else: # Если", "obj.data.materials[collision_node.name].hatcher.from_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.into_mask_1.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_2.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_3.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_4.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'),", "# Объединяем путь директории и имя сцены. path_save = os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root)", "len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad, poly, obj) # Если нет материала, то рассортировываем по", "if abs(distance_point_to_plane(obj.data.vertices[index].co, obj.data.vertices[poly.vertices[0]].co, v1.cross(v2))) < 1e-6: status = True else: status = False", "вершины. 
if not triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth:", "coplanar.append(poly) else: not_coplanar.append(poly) # Если у полигона более четырех вершин, необходимо разбить на", "= \"Generator_selected\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Объединяем путь проекта", "obj.data.vertex_colors[name].data[triangle.loops[0]].color[2], obj.data.vertex_colors[name].data[triangle.loops[0]].color[3]) list_vertext[triangle.loops[0]] = None # Обработка второй вершины. if not triangle.loops[1] in", "else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[2]].color[0], obj.data.vertex_colors[name].data[triangle.loops[2]].color[1], obj.data.vertex_colors[name].data[triangle.loops[2]].color[2], obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None # Добавляем вершины в примитив.", "i in obj.data.edges: i.select=False for i in obj.data.polygons: i.select = False for poly", "треугольники. for poly in named_not_coplanar[name]: for vertext in triangle_poly(poly, obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1],", "путь проекта и относительную директорию сцены. 
path_project_save = os.path.join(context.scene.hatcher.ful_path_project, context.scene.hatcher.rel_path_scene) # Проверяем существует", "= collision_node def collision_polygon_create(obj, scene): named_triangles = {} named_coplanar = {} named_not_coplanar =", "vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8'))", "poly in obj.data.polygons: if len(poly.vertices) >= 5: not_quad.append(poly) for i in obj.data.vertices: i.select=False", "file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save + '.bam')) writer: BamWriter = file.getWriter() writer.writeObject(obj) writer.flush() file.close()", "обработаных вершин. list_vertext = {} # Проходим по треугольниуам. for triangle in mesh.loop_triangles:", "Дабавляем ключ и список. dict_named[obj.data.materials[poly.material_index].name] = [poly] else: # Если есть такой ключ,", "= [] # Создаем полигоны столкновения из компланарных прямольников. for name in named_coplanar:", "color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2],", "npp.set_transform(root, conversion_transform(obj)) else: # Если нет родителя. 
np = NodePath(create_object(obj, scene)) #np.setName(obj.name) #np.show()", "obj.hatcher.coordinate_system == \"CS_default\": lens.set_coordinate_system(CS_default) if obj.hatcher.coordinate_system == \"CS_zup_right\": lens.set_coordinate_system(CS_zup_right) if obj.hatcher.coordinate_system == \"CS_yup_right\":", "TransformState, LQuaternion from panda3d.core import Camera, PerspectiveLens, OrthographicLens, CS_default, CS_zup_right, CS_yup_right, CS_zup_left, CS_yup_left,", "необходимо разбить на треугольники. elif len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad, poly, obj) # Если", "CollisionPolygon(vertext[0], vertext[1], vertext[2]) collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'),", "и имя файла. path_save = os.path.join(path_project_save, obj.name) node = build_hierarchy(obj, context.scene) root =", "obj.data.vertex_colors[name].data[triangle.loops[2]].color[3]) list_vertext[triangle.loops[2]] = None # Добавляем вершины в примитив. prim.addVertices(triangle.loops[0], triangle.loops[1], triangle.loops[2]) prim.closePrimitive()", "+= 1 triangulator3.triangulate() for i in range(triangulator3.getNumTriangles()): v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i))", "i in obj.data.polygons: i.select = False for poly in not_quad: poly.select = True", "= GeomVertexWriter(vdata, 'normal') # Если используются цвета вершин. if color: color_vertex_list = {'color':", "np.reparentTo(root) np.set_transform(root, conversion_transform(obj)) # Проходим по детям. 
for child in obj.children: recurse(child, obj)", "obj.data.materials[collision_node.name].hatcher.into_mask_5.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_6.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_7.decode('utf-8'), obj.data.materials[collision_node.name].hatcher.into_mask_8.decode('utf-8')) collision_node.setIntoCollideMask(int(into_mask, 2)) node_path = NodePath(collision_node) node_path.reparentTo(group) if obj.data.materials[collision_node.name].hatcher.visibility_collision_polygons: node_path.show()", "context): self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw, title = title, icon = icon) def checkcreate_dirs(path_project_save):", "except OSError as error: #print(error) pass def bam_writer_file(path_save, obj): file = BamFile() file.openWrite(Filename.fromOsSpecific(path_save", "\"Checking_coplanarity\" def execute(self, context): select_not_coplanar(context.object) return {'FINISHED'} class CheckingQuad(bpy.types.Operator): bl_idname = \"ui.check_quad\" bl_label", "же создаем дополнительные колонки. for col in obj.data.vertex_colors: # Если имя не совподает", "bpy.context.window_manager.popup_menu(draw, title = title, icon = icon) def checkcreate_dirs(path_project_save): # Проверяем существует ли", "not triangle.loops[0] in list_vertext: vertex_position.set_row(triangle.loops[0]) normal_vertex.set_row(triangle.loops[0]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[0]].co[0], obj.data.vertices[triangle.vertices[0]].co[1], obj.data.vertices[triangle.vertices[0]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1],", "i.select=False for i in obj.data.polygons: i.select = False for poly in not_quad: poly.select", "poly in obj.data.polygons: if not check_coplanar(obj, poly): not_coplanar.append(poly) for i in obj.data.vertices: i.select=False", "подтип. 
if list_object_support[obj.type]: if not obj.data.type == 'PANO': node = build_hierarchy(obj, context.scene) root.add_child(node)", "является сеткой. if obj.type == \"MESH\": if obj.hatcher.type_mesh == \"Render\": create_object = geom_node_create", "status = False # Если вершины три, это значит полигон автоматически копланарен. if", "collision_node.setIntoCollideMask(int(into_mask, 2)) # Если полигон столкновения содержит тела. if collision_node.getNumSolids() >= 1: node_path", "if not parent: npp = NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else:", "== \"CS_invalid\": lens.set_coordinate_system(CS_invalid) camera = Camera(obj.data.name) camera.active = obj.hatcher.camera_active bit = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.draw_mask_1.decode('utf-8'), obj.hatcher.draw_mask_2.decode('utf-8'),", "иерархии root = NodePath(\"root\") # Выполним рекурсию, для поиска всех. def recurse(obj, parent):", "v0 = triangulator3.get_vertex(triangulator3.get_triangle_v0(i)) v1 = triangulator3.get_vertex(triangulator3.get_triangle_v1(i)) v2 = triangulator3.get_vertex(triangulator3.get_triangle_v2(i)) trangle[i] = ((v0[0], v0[1],", "== 'PANO': node = build_hierarchy(obj, context.scene) root.add_child(node) else: node = build_hierarchy(obj, context.scene) root.add_child(node)", "scale) return transform def get_format(obj): color = False texcoord = False # Создаем", "node) show_message_box('Export selected, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator):", "texcoord_vertex_list[uv.name] = GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем порядок треугольников. prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32)", "есть родитель. 
if not parent: npp = NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root,", "то создаем. checkcreate_dirs(path_project_save) # Если поле имени файла заполнено, то объеденяем в один", "return transform def get_format(obj): color = False texcoord = False # Создаем новый", "node = build_hierarchy(obj, context.scene) root.add_child(node) else: node = build_hierarchy(obj, context.scene) root.add_child(node) # Объединяем", "normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name in texcoord_vertex_list:", "источником цвета. if obj.type == \"LIGHT\": create_object = \"LIGHT\" # Если объект является", "False return status def select_not_coplanar(obj): not_coplanar = [] for poly in obj.data.polygons: if", "GeomVertexArrayFormat() # Создаем колонку для вершин. geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32,", "= False texcoord = False # Создаем новый массив. geom_vertex_format = GeomVertexArrayFormat() #", "содержит тела. if collision_node.getNumSolids() >= 1: node_path = NodePath(collision_node) node_path.reparentTo(group) # Если стоит", "bpy import bmesh from mathutils.geometry import distance_point_to_plane ostream = Notify.out() list_object_support = {'MESH':", "quad, name): if name in collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node = CollisionNode(name) collision_node.add_solid(quad) collision_node_dict[name]", "= collision_polygon_create # Если объект является источником цвета. if obj.type == \"LIGHT\": create_object", "некомпланарных прямольников. 
for name in named_not_coplanar: # Нужно разбить некомпланарные полигоны, на треугольники.", "разбить некомпланарные полигоны, на треугольники. for poly in named_not_coplanar[name]: for vertext in triangle_poly(poly,", "for obj in context.scene.objects: # Нас интересуют объекты только без родителя. if not", "и имя сцены. path_save = os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root) show_message_box('Export scene, completed, time:", "и имя файла. path_save = os.path.join(path_project_save, context.scene.hatcher.file_name_selected) bam_writer_file(path_save, root) # Если нет, то", "name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0], obj.data.uv_layers.active.data[triangle.loops[0]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[0]].uv[0], obj.data.uv_layers[name].data[triangle.loops[0]].uv[1]) if color: for name in", "= message) bpy.context.window_manager.popup_menu(draw, title = title, icon = icon) def checkcreate_dirs(path_project_save): # Проверяем", "ли директория, если нет то создаем. if not os.path.exists(path_project_save): try: os.makedirs(path_project_save) except OSError", "дополнительные колонки. for uv in obj.data.uv_layers: # Если имя не совподает с активным.", "vertex_position = GeomVertexWriter(vdata, 'vertex') normal_vertex = GeomVertexWriter(vdata, 'normal') # Если используются цвета вершин.", "obj.data.vertex_colors: # Если имя не совподает с активным. if not col.name == obj.data.vertex_colors.active.name:", "к списку. dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad, name): if name in collision_node_dict: collision_node_dict[name].add_solid(quad) else:", "ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Создаем корень для объединения. 
root", "name in named_not_coplanar: # Нужно разбить некомпланарные полигоны, на треугольники. for poly in", "texcoord def geom_create(obj): geom_vertex_format = get_format(obj) color = geom_vertex_format[1] texcoord = geom_vertex_format[2] vdata", "obj.name) bam_writer_file(path_save, node) show_message_box('Export selected, completed, time: {}'.format(datetime.now() - start_time), \"Message\") return {'FINISHED'}", "list_vertext = {} # Проходим по треугольниуам. for triangle in mesh.loop_triangles: # Обработка", "vertext[2]) collision_node.add_solid(quad) from_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.from_mask_1.decode('utf-8'), obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask,", "geom_vertex_format.add_column('texcoord.{}'.format(uv.name), 2, Geom.NT_float32, Geom.C_texcoord) # Создаем формат. my_format = GeomVertexFormat() my_format.addArray(geom_vertex_format) # Регистрируем", "== 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for name in color_vertex_list:", "необходимую для экспорта данного типа объекта. create_object = None # Если объект является", "execute(self, context): start_time = datetime.now() context.view_layer.update() # Перебираем список выбранных объектов. 
for obj", "if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[1]].uv[0], obj.data.uv_layers.active.data[triangle.loops[1]].uv[1]) else: texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[1]].uv[0], obj.data.uv_layers[name].data[triangle.loops[1]].uv[1]) if color: for name", "if obj.type == \"MESH\": if obj.hatcher.type_mesh == \"Render\": create_object = geom_node_create if obj.hatcher.type_mesh", "Geom, GeomVertexFormat, GeomVertexData, GeomVertexWriter, Triangulator3, GeomTriangles from panda3d.core import GeomNode, PandaNode, NodePath, ModelRoot", "obj.data.materials[poly.material_index].name in dict_named: # Дабавляем ключ и список. dict_named[obj.data.materials[poly.material_index].name] = [poly] else: #", "def select_not_coplanar(obj): not_coplanar = [] for poly in obj.data.polygons: if not check_coplanar(obj, poly):", "poly, obj) # Если у полигона более четырех вершин, необходимо разбить на треугольники.", "сцены. path_save = os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root) show_message_box('Export scene, completed, time: {}'.format(datetime.now() -", "Нужно разбить некомпланарные полигоны, на треугольники. for poly in named_not_coplanar[name]: for vertext in", "elif len(poly.vertices) >= 4: not_quad.append(poly) else: # Если полигон из трех вершин, проверка", "для вершин. geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\", 3, Geom.NT_float32, Geom.C_normal) # Проверка есть", "geom_vertex_format = GeomVertexArrayFormat() # Создаем колонку для вершин. geom_vertex_format.add_column(\"vertex\", 3, Geom.NT_float32, Geom.C_point) geom_vertex_format.add_column(\"normal\",", "списку. 
dict_named[obj.data.materials[poly.material_index].name].append(poly) def colnode_add_dict(collision_node_dict, quad, name): if name in collision_node_dict: collision_node_dict[name].add_solid(quad) else: collision_node", "obj.hatcher.from_mask_2.decode('utf-8'), obj.hatcher.from_mask_3.decode('utf-8'), obj.hatcher.from_mask_4.decode('utf-8'), obj.hatcher.from_mask_5.decode('utf-8'), obj.hatcher.from_mask_6.decode('utf-8'), obj.hatcher.from_mask_7.decode('utf-8'), obj.hatcher.from_mask_8.decode('utf-8')) collision_node.setFromCollideMask(int(from_mask, 2)) into_mask = '{}{}{}{}{}{}{}{}'.format(obj.hatcher.into_mask_1.decode('utf-8'), obj.hatcher.into_mask_2.decode('utf-8'),", "объекты только без родителя. if not obj.parent: # Проверим есть ли данный тип", "если нет то создаем. if not os.path.exists(path_project_save): try: os.makedirs(path_project_save) except OSError as error:", "= OrthographicLens() lens.set_film_size(abs(frame_size[0][0]) + abs(frame_size[1][0]), abs(frame_size[0][1]) + abs(frame_size[1][1])) lens.set_focal_length(abs(frame_size[0][2])) lens.set_near_far(obj.data.clip_start, obj.data.clip_end) if obj.hatcher.coordinate_system", "prim.closePrimitive() geom = Geom(vdata) geom.addPrimitive(prim) return geom def select_not_quad(obj): not_quad = [] for", "poly in not_quad: for vertext in triangle_poly(poly, obj).values(): quad = CollisionPolygon(vertext[0], vertext[1], vertext[2])", "if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[0]].normal[0], obj.data.vertices[triangle.vertices[0]].normal[1], obj.data.vertices[triangle.vertices[0]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2]) if texcoord: for name", "Geom.NT_uint8, Geom.C_color) # Так же создаем дополнительные колонки. 
for col in obj.data.vertex_colors: #", "obj.hatcher.coordinate_system == \"CS_zup_left\": lens.set_coordinate_system(CS_zup_left) if obj.hatcher.coordinate_system == \"CS_yup_left\": lens.set_coordinate_system(CS_yup_left) if obj.hatcher.coordinate_system == \"CS_invalid\":", "флажок показывать полигон столкновения. if obj.hatcher.visibility_collision_polygons: node_path.show() return group.node().getChild(0) def geom_node_create(obj, scene): geom", "полигон автоматически копланарен. if len(poly.vertices) == 3: status = True elif len(poly.vertices) >=", "def select_not_quad(obj): not_quad = [] for poly in obj.data.polygons: if len(poly.vertices) >= 5:", "= GeomVertexWriter(vdata, 'texcoord.{}'.format(uv.name)) # Запишем порядок треугольников. prim = GeomTriangles(Geom.UHStatic) prim.makeIndexed() prim.setIndexType(Geom.NT_uint32) mesh", "in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[0]) if name == 'color': color_vertex_list[name].addData4(obj.data.vertex_colors.active.data[triangle.loops[0]].color[0], obj.data.vertex_colors.active.data[triangle.loops[0]].color[1], obj.data.vertex_colors.active.data[triangle.loops[0]].color[2], obj.data.vertex_colors.active.data[triangle.loops[0]].color[3]) else: color_vertex_list[name].addData4(obj.data.vertex_colors[name].data[triangle.loops[0]].color[0],", "title = title, icon = icon) def checkcreate_dirs(path_project_save): # Проверяем существует ли директория,", "всем объектом в сцене. for obj in context.scene.objects: # Нас интересуют объекты только", "vertext_quad = [] # Создаем полигоны столкновения из компланарных прямольников. for poly in", "Если объект является камерой. if obj.type == \"CAMERA\": if obj.data.type != 'PANO': create_object", "named_triangles = {} named_coplanar = {} named_not_coplanar = {} named_not_quad = {} triangles", "не совподает с активным. 
if not col.name == obj.data.vertex_colors.active.name: geom_vertex_format.add_column('color.{}'.format(col.name), 4, Geom.NT_uint8, Geom.C_color)", "разбить некомпланарные полигоны, на треугольники. for poly in not_coplanar: for vertext in triangle_poly(poly,", "разбить полигоны у которых более четырех сторон на треугольники. for poly in not_quad:", "Перебираем список выбранных объектов. for obj in context.selected_objects: # Проверим есть ли данный", "obj).values(): colnode_add_dict(collision_node_dict, CollisionPolygon(vertext[0], vertext[1], vertext[2]), name) for collision_node in collision_node_dict.values(): from_mask = '{}{}{}{}{}{}{}{}'.format(obj.data.materials[collision_node.name].hatcher.from_mask_1.decode('utf-8'),", "normal_vertex.set_row(triangle.loops[1]) vertex_position.add_data3(obj.data.vertices[triangle.vertices[1]].co[0], obj.data.vertices[triangle.vertices[1]].co[1], obj.data.vertices[triangle.vertices[1]].co[2]) if triangle.use_smooth: normal_vertex.add_data3(obj.data.vertices[triangle.vertices[1]].normal[0], obj.data.vertices[triangle.vertices[1]].normal[1], obj.data.vertices[triangle.vertices[1]].normal[2]) else: normal_vertex.add_data3(triangle.normal[0], triangle.normal[1], triangle.normal[2])", "for name in named_not_coplanar: # Нужно разбить некомпланарные полигоны, на треугольники. for poly", "path_save = os.path.join(path_project_save, context.scene.name) bam_writer_file(path_save, root) show_message_box('Export scene, completed, time: {}'.format(datetime.now() - start_time),", "vertext_quad = [] # Создаем полигоны столкновения из треугольников. 
for name in named_triangles:", "obj.data.polygons: i.select = False for poly in not_quad: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\")", "False for poly in not_coplanar: poly.select = True bpy.ops.object.mode_set(mode='EDIT') bpy.ops.mesh.select_mode(type=\"FACE\") def triangle_poly(poly, obj):", "context.scene.hatcher.rel_path_other) # Проверяем существует ли директория, если нет то создаем. checkcreate_dirs(path_project_save) # Если", "else: status = False return status def select_not_coplanar(obj): not_coplanar = [] for poly", "файла заполнено, то объеденяем в один файл. if not context.scene.hatcher.file_name_selected == '': #", "- start_time), \"Message\") return {'FINISHED'} class CheckingCoplanarity(bpy.types.Operator): bl_idname = \"ui.check_coplanarity\" bl_label = \"Checking_coplanarity\"", "if not col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name)) # Если используются координаты", "\"ui.export_object\" bl_label = \"Generator_object\" def execute(self, context): start_time = datetime.now() context.view_layer.update() # Перебираем", "list_object_support: # Если есть ли подтип. if list_object_support[obj.type]: if not obj.data.type == 'PANO':", "вершин у объекта. if obj.data.vertex_colors.active: color = True # Создаем колонку для цвета", "Если есть ли подтип. if list_object_support[obj.type]: if not obj.data.type == 'PANO': node =", "add_polygons_to_dict(named_coplanar, poly, obj) else: add_polygons_to_dict(named_not_coplanar, poly, obj) # Если у полигона более четырех", "import os from datetime import datetime class ExportObject(bpy.types.Operator): bl_idname = \"ui.export_object\" bl_label =", "Box\", icon = 'INFO'): def draw(self, context): self.layout.label(text = message) bpy.context.window_manager.popup_menu(draw, title =", "разбить на треугольники. 
elif len(poly.vertices) >= 4: add_polygons_to_dict(named_not_quad, poly, obj) # Если нет", "= camera_create # Если есть родитель. if not parent: npp = NodePath(create_object(obj, scene))", "не совподает с активным. if not col.name == obj.data.vertex_colors.active.name: color_vertex_list[col.name] = GeomVertexWriter(vdata, 'color.{}'.format(col.name))", "и список. dict_named[obj.data.materials[poly.material_index].name] = [poly] else: # Если есть такой ключ, добавляем к", "vertext_quad[1], vertext_quad[2], vertext_quad[3]), name) vertext_quad = [] # Создаем полигоны столкновения из некомпланарных", "именем по умолчанию. geom_vertex_format.add_column(\"texcoord\", 2, Geom.NT_float32, Geom.C_texcoord) # Так же создаем дополнительные колонки.", "if color: color_vertex_list = {'color': GeomVertexWriter(vdata, 'color')} # Так же создаем дополнительные слои.", "in named_coplanar: for poly in named_coplanar[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0],", "texcoord_vertex_list[name].addData2(obj.data.uv_layers[name].data[triangle.loops[2]].uv[0], obj.data.uv_layers[name].data[triangle.loops[2]].uv[1]) if color: for name in color_vertex_list: color_vertex_list[name].set_row(triangle.loops[2]) if name == 'color':", "= NodePath(create_object(obj, scene)) #npp.setName(obj.name) #npp.show() npp.reparentTo(root) npp.set_transform(root, conversion_transform(obj)) else: # Если нет родителя.", "collision_polygon_create(obj, scene): named_triangles = {} named_coplanar = {} named_not_coplanar = {} named_not_quad =", "по детям. 
for child in obj.children: recurse(child, obj) recurse(obj, obj.parent) return root.node().getChild(0) import", "triangle.normal[2]) if texcoord: for name in texcoord_vertex_list: texcoord_vertex_list[name].set_row(triangle.loops[0]) if name == 'texcoord': texcoord_vertex_list[name].addData2(obj.data.uv_layers.active.data[triangle.loops[0]].uv[0],", "in named_triangles: for poly in named_triangles[name]: for index in poly.vertices: vertext_quad.append(Point3(*obj.data.vertices[index].co)) colnode_add_dict(collision_node_dict, CollisionPolygon(vertext_quad[0],", "относительную директорию модели. path_project_save = os.path.join(context.scene.hatcher.ful_path_project, obj.hatcher.rel_path_object) # Проверяем существует ли директория, если" ]
[ "from .blueprint import form from .shared.render import render_template_with_title from .shared.routing import route_to_next_form_page from", "ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value encountered: \" + applying_on_own_behalf_answer)", "\"/nhs-login\" elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value", "ApplyingOnOwnBehalfAnswers value encountered: \" + applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")},", "else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value encountered: \" + applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path,", "ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value encountered: \" + applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\": session.get(\"postcode\",", "previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\")", "encountered: \" + applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), )", "applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\" elif applying_on_own_behalf_answer ==", "form from 
.shared.render import render_template_with_title from .shared.routing import route_to_next_form_page from .shared.session import get_errors_from_session,", "@form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path =", "from .shared.render import render_template_with_title from .shared.routing import route_to_next_form_page from .shared.session import get_errors_from_session, request_form,", "import form from .shared.render import render_template_with_title from .shared.routing import route_to_next_form_page from .shared.session import", "\"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value encountered: \" + applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\",", "session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\") if not", "applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def", "\"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"] =", "from .shared.validation import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if 
applying_on_own_behalf_answer", "ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\" elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\" else: raise", "get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\" elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path", "get_answer_from_form from .shared.validation import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if", "get_errors_from_session, request_form, get_answer_from_form from .shared.validation import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer =", "render_template_with_title from .shared.routing import route_to_next_form_page from .shared.session import get_errors_from_session, request_form, get_answer_from_form from .shared.validation", "ApplyingOnOwnBehalfAnswers from .blueprint import form from .shared.render import render_template_with_title from .shared.routing import route_to_next_form_page", "vulnerable_people_form.form_pages.shared.answers_enums import ApplyingOnOwnBehalfAnswers from .blueprint import form from .shared.render import render_template_with_title from .shared.routing", ".shared.render import render_template_with_title from .shared.routing import route_to_next_form_page from .shared.session import get_errors_from_session, request_form, get_answer_from_form", "prev_path = \"/nhs-login\" elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected", "value encountered: \" + applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\", 
previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"),", "return render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification():", "== ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value encountered: \" +", "if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\" elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path =", "route_to_next_form_page from .shared.session import get_errors_from_session, request_form, get_answer_from_form from .shared.validation import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"])", "+ applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"])", "= get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\" elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value:", "def post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\") if not validate_postcode(session[\"postcode\"], \"postcode\"): return redirect(\"/postcode-eligibility\") session[\"error_items\"] =", "methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\") if not validate_postcode(session[\"postcode\"], \"postcode\"): return redirect(\"/postcode-eligibility\") session[\"error_items\"]", "= 
\"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value encountered: \" + applying_on_own_behalf_answer) return render_template_with_title(", "= request_form().get(\"postcode\") if not validate_postcode(session[\"postcode\"], \"postcode\"): return redirect(\"/postcode-eligibility\") session[\"error_items\"] = {} return route_to_next_form_page()", "\" + applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\",", "elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value encountered:", "from flask import redirect, session from vulnerable_people_form.form_pages.shared.answers_enums import ApplyingOnOwnBehalfAnswers from .blueprint import form", "import render_template_with_title from .shared.routing import route_to_next_form_page from .shared.session import get_errors_from_session, request_form, get_answer_from_form from", ") @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\") if not validate_postcode(session[\"postcode\"], \"postcode\"): return", "from .shared.session import get_errors_from_session, request_form, get_answer_from_form from .shared.validation import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def", "redirect, session from vulnerable_people_form.form_pages.shared.answers_enums import ApplyingOnOwnBehalfAnswers from .blueprint import form from .shared.render import", ".shared.session import get_errors_from_session, request_form, get_answer_from_form from .shared.validation import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def 
get_postcode_eligibility():", "get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\" elif applying_on_own_behalf_answer", "render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"]", "validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path", "from .shared.routing import route_to_next_form_page from .shared.session import get_errors_from_session, request_form, get_answer_from_form from .shared.validation import", "<filename>vulnerable_people_form/form_pages/postcode_eligibility.py<gh_stars>0 from flask import redirect, session from vulnerable_people_form.form_pages.shared.answers_enums import ApplyingOnOwnBehalfAnswers from .blueprint import", "def get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\" elif", "from vulnerable_people_form.form_pages.shared.answers_enums import ApplyingOnOwnBehalfAnswers from .blueprint import form from .shared.render import render_template_with_title from", "session[\"postcode\"] = request_form().get(\"postcode\") if not validate_postcode(session[\"postcode\"], \"postcode\"): return redirect(\"/postcode-eligibility\") session[\"error_items\"] = {} return", "prev_path = \"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value 
encountered: \" + applying_on_own_behalf_answer) return", "\"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\") if not validate_postcode(session[\"postcode\"],", "raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value encountered: \" + applying_on_own_behalf_answer) return render_template_with_title( \"postcode-eligibility.html\", previous_path=prev_path, values={\"postcode\":", "values={\"postcode\": session.get(\"postcode\", \"\")}, **get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\") if", ".shared.routing import route_to_next_form_page from .shared.session import get_errors_from_session, request_form, get_answer_from_form from .shared.validation import validate_postcode", "import redirect, session from vulnerable_people_form.form_pages.shared.answers_enums import ApplyingOnOwnBehalfAnswers from .blueprint import form from .shared.render", "== ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\" elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\" else:", "import route_to_next_form_page from .shared.session import get_errors_from_session, request_form, get_answer_from_form from .shared.validation import validate_postcode @form.route(\"/postcode-eligibility\",", "applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers value encountered: \"", "applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\" elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\"", ".blueprint import form from 
.shared.render import render_template_with_title from .shared.routing import route_to_next_form_page from .shared.session", ".shared.validation import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer ==", "= \"/nhs-login\" elif applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.NO.value: prev_path = \"/applying-on-own-behalf\" else: raise ValueError(\"Unexpected ApplyingOnOwnBehalfAnswers", "**get_errors_from_session(\"postcode\"), ) @form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\") if not validate_postcode(session[\"postcode\"], \"postcode\"):", "import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value:", "session from vulnerable_people_form.form_pages.shared.answers_enums import ApplyingOnOwnBehalfAnswers from .blueprint import form from .shared.render import render_template_with_title", "import get_errors_from_session, request_form, get_answer_from_form from .shared.validation import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer", "import ApplyingOnOwnBehalfAnswers from .blueprint import form from .shared.render import render_template_with_title from .shared.routing import", "request_form, get_answer_from_form from .shared.validation import validate_postcode @form.route(\"/postcode-eligibility\", methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"])", "flask import redirect, session from 
vulnerable_people_form.form_pages.shared.answers_enums import ApplyingOnOwnBehalfAnswers from .blueprint import form from", "@form.route(\"/postcode-eligibility\", methods=[\"POST\"]) def post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\") if not validate_postcode(session[\"postcode\"], \"postcode\"): return redirect(\"/postcode-eligibility\")", "post_postcode_verification(): session[\"postcode\"] = request_form().get(\"postcode\") if not validate_postcode(session[\"postcode\"], \"postcode\"): return redirect(\"/postcode-eligibility\") session[\"error_items\"] = {}", "methods=[\"GET\"]) def get_postcode_eligibility(): applying_on_own_behalf_answer = get_answer_from_form([\"applying_on_own_behalf\"]) if applying_on_own_behalf_answer == ApplyingOnOwnBehalfAnswers.YES.value: prev_path = \"/nhs-login\"" ]
[ ".scnet_bbox_head import SCNetBBoxHead from .rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ =", "DoubleConvFCBBoxHead from .sabl_head import SABLHead from .scnet_bbox_head import SCNetBBoxHead from .rotated import (BBoxHeadRbbox,", "import SABLHead from .scnet_bbox_head import SCNetBBoxHead from .rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox,", "from .convfc_bbox_head import (ConvFCBBoxHead, Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) from .dii_head import DIIHead from .double_bbox_head import", ".dii_head import DIIHead from .double_bbox_head import DoubleConvFCBBoxHead from .sabl_head import SABLHead from .scnet_bbox_head", "SABLHead from .scnet_bbox_head import SCNetBBoxHead from .rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox)", "from .scnet_bbox_head import SCNetBBoxHead from .rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__", "import DIIHead from .double_bbox_head import DoubleConvFCBBoxHead from .sabl_head import SABLHead from .scnet_bbox_head import", "Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ = [ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead', 'DIIHead',", "from .double_bbox_head import DoubleConvFCBBoxHead from .sabl_head import SABLHead from .scnet_bbox_head import SCNetBBoxHead from", "__all__ = [ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead', 'DIIHead', 'SCNetBBoxHead', 'BBoxHeadRbbox', 'ConvFCBBoxHeadRbbox',", "import (ConvFCBBoxHead, Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) from .dii_head import DIIHead from .double_bbox_head import DoubleConvFCBBoxHead from", "from .bbox_head import BBoxHead 
from .convfc_bbox_head import (ConvFCBBoxHead, Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) from .dii_head import", "MHBBoxHeadRbbox) __all__ = [ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead', 'DIIHead', 'SCNetBBoxHead', 'BBoxHeadRbbox',", "import SCNetBBoxHead from .rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ = [", "from .rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ = [ 'BBoxHead', 'ConvFCBBoxHead',", "(ConvFCBBoxHead, Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) from .dii_head import DIIHead from .double_bbox_head import DoubleConvFCBBoxHead from .sabl_head", ".bbox_head import BBoxHead from .convfc_bbox_head import (ConvFCBBoxHead, Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) from .dii_head import DIIHead", ".sabl_head import SABLHead from .scnet_bbox_head import SCNetBBoxHead from .rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox,", "from .dii_head import DIIHead from .double_bbox_head import DoubleConvFCBBoxHead from .sabl_head import SABLHead from", "[ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead', 'DIIHead', 'SCNetBBoxHead', 'BBoxHeadRbbox', 'ConvFCBBoxHeadRbbox', 'Shared2FCBBoxHeadRbbox', 'Shared4Conv1FCBBoxHeadRbbox',", "from .sabl_head import SABLHead from .scnet_bbox_head import SCNetBBoxHead from .rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox,", "SCNetBBoxHead from .rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ = [ 'BBoxHead',", "'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead', 'DIIHead', 'SCNetBBoxHead', 'BBoxHeadRbbox', 'ConvFCBBoxHeadRbbox', 'Shared2FCBBoxHeadRbbox', 
'Shared4Conv1FCBBoxHeadRbbox', 'MHBBoxHeadRbbox'", "import BBoxHead from .convfc_bbox_head import (ConvFCBBoxHead, Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) from .dii_head import DIIHead from", ".double_bbox_head import DoubleConvFCBBoxHead from .sabl_head import SABLHead from .scnet_bbox_head import SCNetBBoxHead from .rotated", "import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ = [ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead',", "Shared4Conv1FCBBoxHead) from .dii_head import DIIHead from .double_bbox_head import DoubleConvFCBBoxHead from .sabl_head import SABLHead", ".rotated import (BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ = [ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead',", "'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead', 'DIIHead', 'SCNetBBoxHead', 'BBoxHeadRbbox', 'ConvFCBBoxHeadRbbox', 'Shared2FCBBoxHeadRbbox', 'Shared4Conv1FCBBoxHeadRbbox', 'MHBBoxHeadRbbox' ]", "= [ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead', 'DIIHead', 'SCNetBBoxHead', 'BBoxHeadRbbox', 'ConvFCBBoxHeadRbbox', 'Shared2FCBBoxHeadRbbox',", "Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ = [ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead',", "ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ = [ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead', 'DIIHead', 'SCNetBBoxHead',", ".convfc_bbox_head import (ConvFCBBoxHead, Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) from .dii_head import DIIHead from .double_bbox_head import DoubleConvFCBBoxHead", "import DoubleConvFCBBoxHead from .sabl_head import SABLHead from .scnet_bbox_head 
import SCNetBBoxHead from .rotated import", "Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) from .dii_head import DIIHead from .double_bbox_head import DoubleConvFCBBoxHead from .sabl_head import", "BBoxHead from .convfc_bbox_head import (ConvFCBBoxHead, Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) from .dii_head import DIIHead from .double_bbox_head", "DIIHead from .double_bbox_head import DoubleConvFCBBoxHead from .sabl_head import SABLHead from .scnet_bbox_head import SCNetBBoxHead", "(BBoxHeadRbbox, Shared2FCBBoxHeadRbbox, Shared4Conv1FCBBoxHeadRbbox, ConvFCBBoxHeadRbbox, MHBBoxHeadRbbox) __all__ = [ 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead'," ]
[ "else: return current_data def is_data_needed(st, data): if not data: return True now =", "{j['used_azimuth']:.1f}\" }) traces.append({ 'x': d.index, 'y': d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\" }) return {", "= sorted(p, key=itemgetter('date'), reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for col in headers])]", "[d.rsam.min() - 20, 2 * d.rsam.mean()] } } } def get_tilt(ch, st): j", "chs = '15,16' url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\", "marker=dict(size=6) )] return { 'data': data, 'layout': { 'margin': { 't': 30 },", "j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not d.empty: d.set_index('date',", "key='date')).count() data = [go.Bar( { 'x': bins.index, 'y': bins.depth, 'name': 'Count' }), go.Scatter(", "return True return False def get_hypos(geo, st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records'])", "current_data def is_data_needed(st, data): if not data: return True now = datetime.now() olddata", "{ 'title': 'Cumulative Moment (dyn-cm)', 'showgrid': False, 'overlaying': 'y', 'side': 'right' } }", "30 } } } def get_nps_so2(ch, st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = []", "data = [] if not d.empty: d.sort_values('date', inplace=True) d['moment'] = d.prefMag.apply(lambda x: pow(10.0,", "pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.avgso2, mode='markers', marker=dict(size=6)", "name='Wind Dir', yaxis='y2', mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout': { 'margin':", "A from datetime import datetime, timedelta from flask 
import request from folium import", "'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.north, 'name': 'North', 'mode':", "zoom_start=11, tiles='Stamen Terrain') if kind == 'T': mid = d.date.min() mad = d.date.max()", "= api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True)", "Data is old td = now - maxdate if (td.seconds / 60) >", "elif kind == 'T': encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st, data):", "m = Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen Terrain') if kind == 'T':", "- 20, 2 * d.rsam.mean()] } } } def get_tilt(ch, st): j =", "}, 'yaxis2': { 'title': 'Wind Direction (deg)', 'showgrid': False, 'overlaying': 'y', 'side': 'right'", "d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.so2,", "seconds=now.second, microseconds=now.microsecond) if 'ipensive' in src: t = '%d%s%s-%s%s' % (now.year, str(now.month).zfill(2), str(now.day).zfill(2),", "operator import itemgetter from os.path import join, dirname, realpath from random import randint", "j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records']) if not d.empty: d['date'] = d['date'].str.slice(stop=-2) d['date']", "f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st, data) m = None if region == 'kism': m", "if not d.empty: d.sort_values('date', inplace=True) d['moment'] = d.prefMag.apply(lambda x: pow(10.0, 16.0 + ((3.0", "more than is currently stored? 
seconds = starttime_str_to_seconds(st) if seconds > (td.days *", "d, t) def get_helicorder(ch): url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3'", "d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min', key='date')).count() data = [go.Bar( { 'x': bins.index, 'y': bins.depth,", "d[i]['date']) ]) for i in range(0, max_rows)]] def get_so2emissions(ch, st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}')", "# -*- coding: utf-8 -*- import pandas as pd import plotly.graph_objs as go", "d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.so2, mode='markers', marker=dict(size=10) )] return { 'data':", "'name': f\"radial {j['used_azimuth']:.1f}\" }) traces.append({ 'x': d.index, 'y': d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\" })", "= now.timetuple().tm_yday tm = now - timedelta(minutes=now.minute % 10, seconds=now.second, microseconds=now.microsecond) if 'ipensive'", "'none' } } } def get_nps_wind(ch, st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = api_request_to_json(url)", "return [[Tr([Th(col) for col in headers])] + [Tr([ Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'], target='_blank')),", "timedelta from flask import request from folium import Map from operator import itemgetter", "= [go.Scatter( x=d.index, y=d.rsam, mode='markers', marker=dict(size=4) )] return { 'data': data, 'layout': {", "min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen Terrain') if kind == 'T': mid = d.date.min() mad", "link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for col in headers])] + [Tr([ Td(A(href=link.format(d[i]['id']), children='%s'", "= '18,20' elif region == 'merz': chs = '15,16' url = 
f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\", "} def get_nps_wind(ch, st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = api_request_to_json(url) data = []", "from random import randint from requests.auth import HTTPBasicAuth from .maputils import create_dcircle_marker, create_tcircle_marker", "= now - mindate # Requested more than is currently stored? seconds =", "f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region):", "Map from operator import itemgetter from os.path import join, dirname, realpath from random", "url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url def get_logs(max_rows=20): p = api_request_to_json('logs')['posts'] headers =", "def is_data_needed(st, data): if not data: return True now = datetime.now() olddata =", "% d[i]['subject'], target='_blank')), Td(children='%s' % d[i]['user']), Td(children='%s' % d[i]['date']) ]) for i in", "marker=dict(size=6) ), go.Scatter( x=d.index, y=d.winddir, name='Wind Dir', yaxis='y2', mode='markers', marker=dict(size=6) )] return {", "now - maxdate if (td.seconds / 60) > 10: return True return False", "{ 't': 30 } } } def get_nps_so2(ch, st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data", "j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date',", "* x)/2.0))) d['cmoment'] = d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min', key='date')).count() data = [go.Bar( {", "Terrain') if kind == 'T': mid = d.date.min() mad = d.date.max() d.apply(create_tcircle_marker, arg=(m,", "'title': 'Cumulative Moment (dyn-cm)', 'showgrid': False, 
'overlaying': 'y', 'side': 'right' } } }", "- timedelta(minutes=now.minute % 10, seconds=now.second, microseconds=now.microsecond) if 'ipensive' in src: t = '%d%s%s-%s%s'", "\\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def", "2 * d.rsam.mean()] } } } def get_tilt(ch, st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d", "= api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not d.empty: d.set_index('date', inplace=True)", "api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data", "{ 'title': 'Wind Direction (deg)', 'showgrid': False, 'overlaying': 'y', 'side': 'right' } }", "= api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True)", "'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.up, 'name': 'Up',", "from .utils import ( api_request_to_json, json_to_dataframe, starttime_str_to_seconds, ) TMP = join(dirname(realpath(__file__)), '../tmp/') LCL", "f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\"", "Td(children='%s' % d[i]['user']), Td(children='%s' % d[i]['date']) ]) for i in range(0, max_rows)]] def", "(td.days * 86400 + td.seconds): return True # Data is old td =", "= requests.get(url, auth=HTTPBasicAuth(u, p)) with open(filename, 
'wb') as f: f.write(r.content) return filename def", "kind == 'A': encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind == 'T': encoded_img =", "* 86400 + td.seconds): return True # Data is old td = now", "pd.read_json(data) mindate = olddata.date.min() maxdate = olddata.date.max() td = now - mindate #", "now = datetime.utcnow() d = now.timetuple().tm_yday tm = now - timedelta(minutes=now.minute % 10,", "starttime_str_to_seconds, ) TMP = join(dirname(realpath(__file__)), '../tmp/') LCL = join(dirname(realpath(__file__)), '../images/') def get_rsam(ch, st):", "'T': mid = d.date.min() mad = d.date.max() d.apply(create_tcircle_marker, arg=(m, mid, mad), axis=1) elif", "d = pd.DataFrame(j['records']) if not d.empty: d['date'] = d['date'].str.slice(stop=-2) d['date'] = pd.to_datetime(d['date']) d.reset_index(drop=True,", "\\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\", "'East', 'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.north,", "{ 'margin': { 't': 30 }, 'yaxis': { 'exponentformat': 'none' } } }", "== 'lerz': m = Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen Terrain') if kind", "d.index.max()] }, 'yaxis': { 'range': [d.rsam.min() - 20, 2 * d.rsam.mean()] } }", "= api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True)", "'Up', 'mode': 'markers', 'marker': dict( size=4 ) }) return { 'data': traces, 'layout':", "d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, 
y=d.windspeed,", "[[Tr([Th(col) for col in headers])] + [Tr([ Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'], target='_blank')), Td(children='%s'", "import join, dirname, realpath from random import randint from requests.auth import HTTPBasicAuth from", "'name': 'Count' }), go.Scatter( { 'x': d.date, 'y': d.cmoment, 'name': 'Moment', 'yaxis': 'y2'", "(td.seconds / 60) > 10: return True return False def get_hypos(geo, st): j", "= pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.so2, mode='markers',", "size=4 ) }) traces.append({ 'x': d.index, 'y': d.north, 'name': 'North', 'mode': 'markers', 'marker':", "auth=HTTPBasicAuth(u, p)) with open(filename, 'wb') as f: f.write(r.content) return filename def get_ash3d_img(): url", "kind == 'A': d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename) return open(filename, 'r').read() def get_hypos_legend(kind): encoded_img", "}) traces.append({ 'x': d.index, 'y': d.north, 'name': 'North', 'mode': 'markers', 'marker': dict( size=4", "b64encode as be from dash_html_components import Th, Tr, Td, A from datetime import", "in headers])] + [Tr([ Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'], target='_blank')), Td(children='%s' % d[i]['user']), Td(children='%s'", "'marker': dict( size=4 ) }) return { 'data': traces, 'layout': { 'margin': {", "itemgetter from os.path import join, dirname, realpath from random import randint from requests.auth", "d.rsam.mean()] } } } def get_tilt(ch, st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch])", "f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ 
f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F'", "d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d.east, 'name': 'East', 'mode': 'markers', 'marker': dict(", "= pd.read_json(data) mindate = olddata.date.min() maxdate = olddata.date.max() td = now - mindate", "% (now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t = '%d%s-%s%s' % (now.year, str(d).zfill(3),", "not d.empty: d.sort_values('date', inplace=True) return d.to_dict('records') def get_hypo_counts(st, data): d = json_to_dataframe(st, data)", "= 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for col in headers])] + [Tr([ Td(A(href=link.format(d[i]['id']), children='%s' %", "'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.up, 'name':", "st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty:", "min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen Terrain') elif region == 'lerz': m = Map(location=[19.43, -154.88],", "'Earthquakes per Hour' }, 'yaxis2': { 'title': 'Cumulative Moment (dyn-cm)', 'showgrid': False, 'overlaying':", "data) data = [] if not d.empty: d.sort_values('date', inplace=True) d['moment'] = d.prefMag.apply(lambda x:", "{ 'range': [d.rsam.min() - 20, 2 * d.rsam.mean()] } } } def get_tilt(ch,", "traces.append({ 'x': d.index, 'y': d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\" }) traces.append({ 'x': d.index, 'y':", "= api_request_to_json('logs')['posts'] headers = ['Post', 'Author', 'Date'] d = sorted(p, key=itemgetter('date'), reverse=True) link", "def get_tilt(ch, st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if", "return True now = datetime.now() olddata = pd.read_json(data) mindate = olddata.date.min() maxdate 
=", "'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.north, 'name':", "'kism': m = Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen Terrain') elif region ==", "= d.date.min() mad = d.date.max() d.apply(create_tcircle_marker, arg=(m, mid, mad), axis=1) elif kind ==", "{ 'margin': { 't': 30 }, 'showlegend': False, 'yaxis': { 'title': 'Earthquakes per", "stored? seconds = starttime_str_to_seconds(st) if seconds > (td.days * 86400 + td.seconds): return", "is currently stored? seconds = starttime_str_to_seconds(st) if seconds > (td.days * 86400 +", "in src: t = '%d%s%s-%s%s' % (now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t", "LCL = join(dirname(realpath(__file__)), '../images/') def get_rsam(ch, st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = []", "+ ((3.0 * x)/2.0))) d['cmoment'] = d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min', key='date')).count() data =", "f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url", "yaxis='y2', mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout': { 'margin': { 't':", "Terrain') elif region == 'lerz': m = Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen", "d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.rsam, mode='markers', marker=dict(size=4) )] return { 'data':", "y=d.avgso2, mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout': { 'margin': { 't':", "> 10: return True return False def get_hypos(geo, st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d", "'yaxis': { 'title': 'Windspeed (m/s)' }, 'yaxis2': { 'title': 'Wind Direction (deg)', 'showgrid':", "folium 
import Map from operator import itemgetter from os.path import join, dirname, realpath", "reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for col in headers])] + [Tr([ Td(A(href=link.format(d[i]['id']),", "if 'ipensive' in src: t = '%d%s%s-%s%s' % (now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2))", "\\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img", "create_dcircle_marker, create_tcircle_marker from .utils import ( api_request_to_json, json_to_dataframe, starttime_str_to_seconds, ) TMP = join(dirname(realpath(__file__)),", "[] if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d['radial'], 'name': f\"radial", "st, current_data): if is_data_needed(st, current_data): return get_hypos(geo, st).to_json() else: return current_data def is_data_needed(st,", "= starttime_str_to_seconds(st) if seconds > (td.days * 86400 + td.seconds): return True #", "datetime.utcnow() d = now.timetuple().tm_yday tm = now - timedelta(minutes=now.minute % 10, seconds=now.second, microseconds=now.microsecond)", "p = api_request_to_json('logs')['posts'] headers = ['Post', 'Author', 'Date'] d = sorted(p, key=itemgetter('date'), reverse=True)", "'data': data, 'layout': { 'margin': { 't': 30 } } } def get_nps_so2(ch,", "'data': data, 'layout': { 'margin': { 't': 30 }, 'xaxis': { 'range': [d.index.min(),", "not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.rsam, mode='markers', marker=dict(size=4) )] return", "from requests.auth import HTTPBasicAuth from .maputils import create_dcircle_marker, 
create_tcircle_marker from .utils import (", "Requested more than is currently stored? seconds = starttime_str_to_seconds(st) if seconds > (td.days", "def get_and_store_hypos(geo, st, current_data): if is_data_needed(st, current_data): return get_hypos(geo, st).to_json() else: return current_data", "coding: utf-8 -*- import pandas as pd import plotly.graph_objs as go import requests", "seconds = starttime_str_to_seconds(st) if seconds > (td.days * 86400 + td.seconds): return True", "d.index, 'y': d.east, 'name': 'East', 'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({", "tiles='Stamen Terrain') if kind == 'T': mid = d.date.min() mad = d.date.max() d.apply(create_tcircle_marker,", "'332010_1008443_D_deposit.gif') return url def get_logs(max_rows=20): p = api_request_to_json('logs')['posts'] headers = ['Post', 'Author', 'Date']", "'y': d.east, 'name': 'East', 'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({ 'x':", "'%d%s%s-%s%s' % (now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t = '%d%s-%s%s' % (now.year,", ")] return { 'data': data, 'layout': { 'margin': { 't': 30 } }", "if kind == 'T': mid = d.date.min() mad = d.date.max() d.apply(create_tcircle_marker, arg=(m, mid,", "10, seconds=now.second, microseconds=now.microsecond) if 'ipensive' in src: t = '%d%s%s-%s%s' % (now.year, str(now.month).zfill(2),", "== 'kism': chs = '18,20' elif region == 'merz': chs = '15,16' url", "inplace=True) data = [go.Scatter( x=d.index, y=d.avgso2, mode='markers', marker=dict(size=6) )] return { 'data': data,", "import request from folium import Map from operator import itemgetter from os.path import", "max_rows)]] def get_so2emissions(ch, st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch])", "'%d%s-%s%s' % (now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year, d, t) def 
get_helicorder(ch): url", "'t': 30 }, 'showlegend': False, 'yaxis': { 'title': 'Earthquakes per Hour' }, 'yaxis2':", "= Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen Terrain') if kind == 'T': mid", "mad = d.date.max() d.apply(create_tcircle_marker, arg=(m, mid, mad), axis=1) elif kind == 'A': d.apply(create_dcircle_marker,", "traces.append({ 'x': d.index, 'y': d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\" }) return { 'data': traces,", "'layout': { 'margin': { 't': 30 } } } def get_nps_so2(ch, st): j", "'x': d.index, 'y': d.east, 'name': 'East', 'mode': 'markers', 'marker': dict( size=4 ) })", "((3.0 * x)/2.0))) d['cmoment'] = d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min', key='date')).count() data = [go.Bar(", "now = datetime.now() olddata = pd.read_json(data) mindate = olddata.date.min() maxdate = olddata.date.max() td", "return False def get_hypos(geo, st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records']) if not", "d.index, 'y': d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\" }) return { 'data': traces, 'layout': {", "x=d.index, y=d.winddir, name='Wind Dir', yaxis='y2', mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout':", "now.timetuple().tm_yday tm = now - timedelta(minutes=now.minute % 10, seconds=now.second, microseconds=now.microsecond) if 'ipensive' in", "data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data =", "f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region): chs = '' if region == 'kism': chs = '18,20'", "traces = [] if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d['radial'],", "60) > 10: return True return False def get_hypos(geo, st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}')", "base64 import b64encode as be from dash_html_components import Th, Tr, Td, A 
from", "'x': d.date, 'y': d.cmoment, 'name': 'Moment', 'yaxis': 'y2' })] return { 'data': data,", "None if kind == 'A': encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind == 'T':", "get_tiltv(region): chs = '' if region == 'kism': chs = '18,20' elif region", "json_to_dataframe(st, data) if not d.empty: d.sort_values('date', inplace=True) return d.to_dict('records') def get_hypo_counts(st, data): d", "not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.so2, mode='markers', marker=dict(size=10) )] return", "} } def get_nps_so2(ch, st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = [] d =", "st).to_json() else: return current_data def is_data_needed(st, data): if not data: return True now", "data = [go.Scatter( x=d.index, y=d.windspeed, name='Wind Speed', mode='markers', marker=dict(size=6) ), go.Scatter( x=d.index, y=d.winddir,", "get_hypos(geo, st).to_json() else: return current_data def is_data_needed(st, data): if not data: return True", "d.groupby(pd.Grouper(freq='60min', key='date')).count() data = [go.Bar( { 'x': bins.index, 'y': bins.depth, 'name': 'Count' }),", "st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = api_request_to_json(url) data = [] d = pd.DataFrame(j['records'][ch])", "kind, data, region): filename = f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st, data) m = None", "f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region): chs =", "{ 't': 30 }, 'yaxis': { 'title': 'Windspeed (m/s)' }, 'yaxis2': { 'title':", "d[i]['user']), Td(children='%s' % d[i]['date']) ]) for i in range(0, max_rows)]] def get_so2emissions(ch, st):", "\\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ 
f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\", "f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st, data): d = json_to_dataframe(st, data) if not d.empty: d.sort_values('date', inplace=True)", "bins.index, 'y': bins.depth, 'name': 'Count' }), go.Scatter( { 'x': d.date, 'y': d.cmoment, 'name':", "join(dirname(realpath(__file__)), '../images/') def get_rsam(ch, st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = [] d =", "d.to_dict('records') def get_hypo_counts(st, data): d = json_to_dataframe(st, data) data = [] if not", "= olddata.date.min() maxdate = olddata.date.max() td = now - mindate # Requested more", "pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.windspeed, name='Wind Speed',", "def get_hypos_legend(kind): encoded_img = None if kind == 'A': encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read())", "src: t = '%d%s%s-%s%s' % (now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t =", "str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t = '%d%s-%s%s' % (now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return", ") }) traces.append({ 'x': d.index, 'y': d.up, 'name': 'Up', 'mode': 'markers', 'marker': dict(", "= f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st, data) m = None if region == 'kism':", "f: f.write(r.content) return filename def get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url def", "utf-8 -*- import pandas as pd import plotly.graph_objs as go import requests from", "if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( 
x=d.index, y=d.so2, mode='markers', marker=dict(size=10) )]", "Hour' }, 'yaxis2': { 'title': 'Cumulative Moment (dyn-cm)', 'showgrid': False, 'overlaying': 'y', 'side':", "'y': d.up, 'name': 'Up', 'mode': 'markers', 'marker': dict( size=4 ) }) return {", "open(filename, 'wb') as f: f.write(r.content) return filename def get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif')", "data = [go.Scatter( x=d.index, y=d.so2, mode='markers', marker=dict(size=10) )] return { 'data': data, 'layout':", "\\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return", "'' if region == 'kism': chs = '18,20' elif region == 'merz': chs", "mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout': { 'margin': { 't': 30", "{ 't': 30 }, 'yaxis': { 'exponentformat': 'none' } } } def get_nps_wind(ch,", "f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username p", "Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen Terrain') elif region == 'lerz': m =", "= json_to_dataframe(st, data) data = [] if not d.empty: d.sort_values('date', inplace=True) d['moment'] =", "(m/s)' }, 'yaxis2': { 'title': 'Wind Direction (deg)', 'showgrid': False, 'overlaying': 'y', 'side':", "'kism': chs = '18,20' elif region == 'merz': chs = '15,16' url =", "-155.27], min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen Terrain') elif region == 'lerz': m = Map(location=[19.43,", "'margin': { 't': 30 } } } def get_rtnet(ch, st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}')", "data): d = json_to_dataframe(st, data) data = [] if 
not d.empty: d.sort_values('date', inplace=True)", "{ 'range': [d.index.min(), d.index.max()] }, 'yaxis': { 'range': [d.rsam.min() - 20, 2 *", ") }) return { 'data': traces, 'layout': { 'margin': { 't': 30 }", "False def get_hypos(geo, st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records']) if not d.empty:", "{ 'margin': { 't': 30 } } } def get_and_store_hypos(geo, st, current_data): if", "url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false' encoded_img =", "get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url def get_logs(max_rows=20): p = api_request_to_json('logs')['posts'] headers", "= pd.DataFrame(j['records']) if not d.empty: d['date'] = d['date'].str.slice(stop=-2) d['date'] = pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True)", "with open(filename, 'wb') as f: f.write(r.content) return filename def get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/'", "get_tilt(ch, st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not", "'layout': { 'margin': { 't': 30 }, 'yaxis': { 'exponentformat': 'none' } }", "inplace=True) data = [go.Scatter( x=d.index, y=d.windspeed, name='Wind Speed', mode='markers', marker=dict(size=6) ), go.Scatter( x=d.index,", "api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records']) if not d.empty: d['date'] = d['date'].str.slice(stop=-2) d['date'] = pd.to_datetime(d['date'])", "t = '%d%s-%s%s' % (now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year, d, t) def", "d.empty: d['date'] = d['date'].str.slice(stop=-2) d['date'] = 
pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return d def get_hypos_map(st,", "Dir', yaxis='y2', mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout': { 'margin': {", "{ 't': 30 } } } def get_and_store_hypos(geo, st, current_data): if is_data_needed(st, current_data):", "if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.rsam, mode='markers', marker=dict(size=4) )]", "[go.Scatter( x=d.index, y=d.avgso2, mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout': { 'margin':", "'x': d.index, 'y': d.north, 'name': 'North', 'mode': 'markers', 'marker': dict( size=4 ) })", "data = [go.Bar( { 'x': bins.index, 'y': bins.depth, 'name': 'Count' }), go.Scatter( {", "d = json_to_dataframe(st, data) m = None if region == 'kism': m =", "{ 't': 30 }, 'xaxis': { 'range': [d.index.min(), d.index.max()] }, 'yaxis': { 'range':", "== 'T': mid = d.date.min() mad = d.date.max() d.apply(create_tcircle_marker, arg=(m, mid, mad), axis=1)", "data) m = None if region == 'kism': m = Map(location=[19.41, -155.27], min_zoom=12,", "= join(dirname(realpath(__file__)), '../images/') def get_rsam(ch, st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = [] d", "elif kind == 'A': d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename) return open(filename, 'r').read() def get_hypos_legend(kind):", "== 'A': d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename) return open(filename, 'r').read() def get_hypos_legend(kind): encoded_img =", "return { 'data': data, 'layout': { 'margin': { 't': 30 }, 'showlegend': False,", "} def get_rtnet(ch, st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = []", "f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ 
f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0'", "} def get_tilt(ch, st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = []", "x: pow(10.0, 16.0 + ((3.0 * x)/2.0))) d['cmoment'] = d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min',", "return True # Data is old td = now - maxdate if (td.seconds", "'markers', 'marker': dict( size=4 ) }) return { 'data': traces, 'layout': { 'margin':", "get_hypos(geo, st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records']) if not d.empty: d['date'] =", "d.index, 'y': d.up, 'name': 'Up', 'mode': 'markers', 'marker': dict( size=4 ) }) return", "d['date'] = pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return d def get_hypos_map(st, kind, data, region): filename", "is_data_needed(st, data): if not data: return True now = datetime.now() olddata = pd.read_json(data)", "d.apply(create_tcircle_marker, arg=(m, mid, mad), axis=1) elif kind == 'A': d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename)", "m = None if region == 'kism': m = Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15,", "p)) with open(filename, 'wb') as f: f.write(r.content) return filename def get_ash3d_img(): url =", "'../tmp/') LCL = join(dirname(realpath(__file__)), '../images/') def get_rsam(ch, st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data =", "return url def get_logs(max_rows=20): p = api_request_to_json('logs')['posts'] headers = ['Post', 'Author', 'Date'] d", "datetime.now() olddata = pd.read_json(data) mindate = olddata.date.min() maxdate = olddata.date.max() td = now", "== 'T': encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" 
def get_hypos_table(st, data): d =", "get_so2emissions(ch, st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not", "'margin': { 't': 30 }, 'xaxis': { 'range': [d.index.min(), d.index.max()] }, 'yaxis': {", "if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.avgso2, mode='markers', marker=dict(size=6) )]", "target='_blank')), Td(children='%s' % d[i]['user']), Td(children='%s' % d[i]['date']) ]) for i in range(0, max_rows)]]", "} } } def get_nps_wind(ch, st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = api_request_to_json(url) data", "'../images/') def get_rsam(ch, st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch])", "st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not d.empty:", "Tr, Td, A from datetime import datetime, timedelta from flask import request from", "= [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter(", "= d['date'].str.slice(stop=-2) d['date'] = pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return d def get_hypos_map(st, kind, data,", "'ipensive' in src: t = '%d%s%s-%s%s' % (now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else:", "('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url def get_logs(max_rows=20): p = api_request_to_json('logs')['posts'] headers = ['Post', 'Author',", "} } def get_nps_wind(ch, st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = api_request_to_json(url) data =", "{ 'x': d.date, 'y': d.cmoment, 'name': 'Moment', 'yaxis': 'y2' })] return { 'data':", "'Count' }), go.Scatter( { 'x': d.date, 'y': d.cmoment, 'name': 'Moment', 'yaxis': 'y2' })]", "inplace=True) 
return d def get_hypos_map(st, kind, data, region): filename = f'{TMP}hypos{randint(0,9999):04d}.html' d =", "if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d.east, 'name': 'East', 'mode':", "be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st, data): d = json_to_dataframe(st, data) if not", "from base64 import b64encode as be from dash_html_components import Th, Tr, Td, A", "return current_data def is_data_needed(st, data): if not data: return True now = datetime.now()", "mid, mad), axis=1) elif kind == 'A': d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename) return open(filename,", ")] return { 'data': data, 'layout': { 'margin': { 't': 30 }, 'xaxis':", "data, 'layout': { 'margin': { 't': 30 }, 'xaxis': { 'range': [d.index.min(), d.index.max()]", "= d.date.max() d.apply(create_tcircle_marker, arg=(m, mid, mad), axis=1) elif kind == 'A': d.apply(create_dcircle_marker, args=(m,),", "st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty:", "open(filename, 'r').read() def get_hypos_legend(kind): encoded_img = None if kind == 'A': encoded_img =", "starttime_str_to_seconds(st) if seconds > (td.days * 86400 + td.seconds): return True # Data", "than is currently stored? 
seconds = starttime_str_to_seconds(st) if seconds > (td.days * 86400", "'y', 'side': 'right' } } } def get_spectrogram(src): now = datetime.utcnow() d =", "'layout': { 'margin': { 't': 30 }, 'xaxis': { 'range': [d.index.min(), d.index.max()] },", "'T': encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st, data): d = json_to_dataframe(st,", "f.write(r.content) return filename def get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url def get_logs(max_rows=20):", "data, region): filename = f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st, data) m = None if", "d.prefMag.apply(lambda x: pow(10.0, 16.0 + ((3.0 * x)/2.0))) d['cmoment'] = d.moment.cumsum() bins =", "if region == 'kism': chs = '18,20' elif region == 'merz': chs =", "def get_hypos_table(st, data): d = json_to_dataframe(st, data) if not d.empty: d.sort_values('date', inplace=True) return", "20, 2 * d.rsam.mean()] } } } def get_tilt(ch, st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}')", "d.up, 'name': 'Up', 'mode': 'markers', 'marker': dict( size=4 ) }) return { 'data':", "axis=1) elif kind == 'A': d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename) return open(filename, 'r').read() def", "str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t = '%d%s-%s%s' % (now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year,", "'t': 30 }, 'yaxis': { 'title': 'Windspeed (m/s)' }, 'yaxis2': { 'title': 'Wind", "} def get_and_store_hypos(geo, st, current_data): if is_data_needed(st, current_data): return get_hypos(geo, st).to_json() else: return", "> (td.days * 86400 + td.seconds): return True # Data is old td", "mode='markers', marker=dict(size=6) ), go.Scatter( x=d.index, y=d.winddir, name='Wind Dir', yaxis='y2', mode='markers', marker=dict(size=6) )] return", "if 
is_data_needed(st, current_data): return get_hypos(geo, st).to_json() else: return current_data def is_data_needed(st, data): if", "get_hypos_map(st, kind, data, region): filename = f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st, data) m =", "m = Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen Terrain') elif region == 'lerz':", "'name': 'Up', 'mode': 'markers', 'marker': dict( size=4 ) }) return { 'data': traces,", "import randint from requests.auth import HTTPBasicAuth from .maputils import create_dcircle_marker, create_tcircle_marker from .utils", "t = '%d%s%s-%s%s' % (now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t = '%d%s-%s%s'", "not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d.east, 'name': 'East', 'mode': 'markers',", "tiles='Stamen Terrain') elif region == 'lerz': m = Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15, zoom_start=11,", "realpath from random import randint from requests.auth import HTTPBasicAuth from .maputils import create_dcircle_marker,", "d = json_to_dataframe(st, data) if not d.empty: d.sort_values('date', inplace=True) return d.to_dict('records') def get_hypo_counts(st,", "get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username p = request.authorization.password", "return filename def get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url def get_logs(max_rows=20): p", "from operator import itemgetter from os.path import join, dirname, realpath from random import", "'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.up, 'name': 'Up', 'mode':", "f\"radial {j['used_azimuth']:.1f}\" }) traces.append({ 'x': d.index, 'y': d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\" }) 
return", "} } } def get_nps_so2(ch, st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = [] d", "\\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\", "x=d.index, y=d.windspeed, name='Wind Speed', mode='markers', marker=dict(size=6) ), go.Scatter( x=d.index, y=d.winddir, name='Wind Dir', yaxis='y2',", "'data': data, 'layout': { 'margin': { 't': 30 }, 'showlegend': False, 'yaxis': {", "{ 't': 30 }, 'showlegend': False, 'yaxis': { 'title': 'Earthquakes per Hour' },", "api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not d.empty: d.set_index('date', inplace=True) traces.append({", "\\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\", "json_to_dataframe(st, data) m = None if region == 'kism': m = Map(location=[19.41, -155.27],", "f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm):", "{ 'data': data, 'layout': { 'margin': { 't': 30 } } } def", "get_nps_wind(ch, st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = 
api_request_to_json(url) data = [] d =", "{ 'title': 'Windspeed (m/s)' }, 'yaxis2': { 'title': 'Wind Direction (deg)', 'showgrid': False,", "j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date',", "\\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return", "encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region): chs = '' if region", "{j['tangential_azimuth']:.1f}\" }) return { 'data': traces, 'layout': { 'margin': { 't': 30 }", "bins = d.groupby(pd.Grouper(freq='60min', key='date')).count() data = [go.Bar( { 'x': bins.index, 'y': bins.depth, 'name':", "d = now.timetuple().tm_yday tm = now - timedelta(minutes=now.minute % 10, seconds=now.second, microseconds=now.microsecond) if", "'margin': { 't': 30 }, 'yaxis': { 'title': 'Windspeed (m/s)' }, 'yaxis2': {", "def get_hypos_map(st, kind, data, region): filename = f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st, data) m", "'yaxis2': { 'title': 'Wind Direction (deg)', 'showgrid': False, 'overlaying': 'y', 'side': 'right' }", "} } def get_tilt(ch, st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces =", "d.empty: d.sort_values('date', inplace=True) d['moment'] = d.prefMag.apply(lambda x: pow(10.0, 16.0 + ((3.0 * x)/2.0)))", "d.date.min() mad = d.date.max() d.apply(create_tcircle_marker, arg=(m, mid, mad), axis=1) elif kind == 'A':", "maxdate = olddata.date.max() td = now - mindate # Requested more than is", "m.save(filename) return open(filename, 'r').read() def get_hypos_legend(kind): encoded_img = None if kind == 'A':", 
"== 'merz': chs = '15,16' url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\", "url def get_logs(max_rows=20): p = api_request_to_json('logs')['posts'] headers = ['Post', 'Author', 'Date'] d =", "api_request_to_json(url) data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data", "} def get_spectrogram(src): now = datetime.utcnow() d = now.timetuple().tm_yday tm = now -", "url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username p = request.authorization.password r = requests.get(url, auth=HTTPBasicAuth(u,", "{ 't': 30 } } } def get_rtnet(ch, st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d", "range(0, max_rows)]] def get_so2emissions(ch, st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = [] d =", "'t': 30 } } } def get_nps_so2(ch, st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data =", "+ td.seconds): return True # Data is old td = now - maxdate", "import ( api_request_to_json, json_to_dataframe, starttime_str_to_seconds, ) TMP = join(dirname(realpath(__file__)), '../tmp/') LCL = join(dirname(realpath(__file__)),", "Th, Tr, Td, A from datetime import datetime, timedelta from flask import request", "data, 'layout': { 'margin': { 't': 30 } } } def get_nps_so2(ch, st):", "'data': data, 'layout': { 'margin': { 't': 30 }, 'yaxis': { 'title': 'Windspeed", "= join(dirname(realpath(__file__)), '../tmp/') LCL = join(dirname(realpath(__file__)), '../images/') def get_rsam(ch, st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}')", "'x': d.index, 'y': d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\" }) traces.append({ 'x': d.index, 'y': d['tangential'],", "= [] if not d.empty: d.sort_values('date', inplace=True) d['moment'] = d.prefMag.apply(lambda x: pow(10.0, 
16.0", "f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region): chs = '' if", "if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.windspeed, name='Wind Speed', mode='markers',", "kind == 'T': mid = d.date.min() mad = d.date.max() d.apply(create_tcircle_marker, arg=(m, mid, mad),", "region == 'merz': chs = '15,16' url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}'", "'lerz': m = Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen Terrain') if kind ==", "(f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = api_request_to_json(url) data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty:", "= api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records']) if not d.empty: d['date'] = d['date'].str.slice(stop=-2) d['date'] =", "return { 'data': data, 'layout': { 'margin': { 't': 30 }, 'yaxis': {", "# Data is old td = now - maxdate if (td.seconds / 60)", "else: t = '%d%s-%s%s' % (now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year, d, t)", "dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.north, 'name': 'North', 'mode': 'markers',", "return d.to_dict('records') def get_hypo_counts(st, data): d = json_to_dataframe(st, data) data = [] if", "chs = '18,20' elif region == 'merz': chs = '15,16' url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20'", ".maputils import create_dcircle_marker, create_tcircle_marker from .utils import ( api_request_to_json, json_to_dataframe, starttime_str_to_seconds, ) TMP", "import Th, Tr, Td, A from datetime import datetime, timedelta from flask import", "= 
f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url),", "region == 'lerz': m = Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen Terrain') if", "False, 'yaxis': { 'title': 'Earthquakes per Hour' }, 'yaxis2': { 'title': 'Cumulative Moment", "= d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min', key='date')).count() data = [go.Bar( { 'x': bins.index, 'y':", "} } } def get_spectrogram(src): now = datetime.utcnow() d = now.timetuple().tm_yday tm =", "y=d.rsam, mode='markers', marker=dict(size=4) )] return { 'data': data, 'layout': { 'margin': { 't':", "/ 60) > 10: return True return False def get_hypos(geo, st): j =", "not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.avgso2, mode='markers', marker=dict(size=6) )] return", "= pd.DataFrame(j['records'][ch]) traces = [] if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index,", "from dash_html_components import Th, Tr, Td, A from datetime import datetime, timedelta from", "'side': 'right' } } } def get_spectrogram(src): now = datetime.utcnow() d = now.timetuple().tm_yday", "dict( size=4 ) }) return { 'data': traces, 'layout': { 'margin': { 't':", "olddata.date.max() td = now - mindate # Requested more than is currently stored?", "% (now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year, d, t) def get_helicorder(ch): url =", "get_helicorder(ch): url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ 
f'&sc.0=T&plotSeparately.0=false' encoded_img", "if region == 'kism': m = Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen Terrain')", "\\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region): chs", "as f: f.write(r.content) return filename def get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url", "d.index, 'y': d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\" }) traces.append({ 'x': d.index, 'y': d['tangential'], 'name':", "True return False def get_hypos(geo, st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records']) if", "if not d.empty: d['date'] = d['date'].str.slice(stop=-2) d['date'] = pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return d", "30 } } } def get_rtnet(ch, st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch])", "} } def get_spectrogram(src): now = datetime.utcnow() d = now.timetuple().tm_yday tm = now", "datetime import datetime, timedelta from flask import request from folium import Map from", "j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not d.empty: d.set_index('date',", "= ['Post', 'Author', 'Date'] d = sorted(p, key=itemgetter('date'), reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return", "traces = [] if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d.east,", "traces.append({ 'x': d.index, 'y': d.north, 'name': 'North', 'mode': 'markers', 'marker': dict( size=4 )", "= request.authorization.username p = request.authorization.password r = requests.get(url, auth=HTTPBasicAuth(u, p)) with open(filename, 'wb')", "if 
kind == 'A': encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind == 'T': encoded_img", "]) for i in range(0, max_rows)]] def get_so2emissions(ch, st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data", "from .maputils import create_dcircle_marker, create_tcircle_marker from .utils import ( api_request_to_json, json_to_dataframe, starttime_str_to_seconds, )", "import Map from operator import itemgetter from os.path import join, dirname, realpath from", "inplace=True) d['moment'] = d.prefMag.apply(lambda x: pow(10.0, 16.0 + ((3.0 * x)/2.0))) d['cmoment'] =", "}, 'showlegend': False, 'yaxis': { 'title': 'Earthquakes per Hour' }, 'yaxis2': { 'title':", "d[i]['subject'], target='_blank')), Td(children='%s' % d[i]['user']), Td(children='%s' % d[i]['date']) ]) for i in range(0,", "} } def get_and_store_hypos(geo, st, current_data): if is_data_needed(st, current_data): return get_hypos(geo, st).to_json() else:", "not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\" })", "}, 'xaxis': { 'range': [d.index.min(), d.index.max()] }, 'yaxis': { 'range': [d.rsam.min() - 20,", "requests.get(url, auth=HTTPBasicAuth(u, p)) with open(filename, 'wb') as f: f.write(r.content) return filename def get_ash3d_img():", "}, 'yaxis': { 'title': 'Windspeed (m/s)' }, 'yaxis2': { 'title': 'Wind Direction (deg)',", "api_request_to_json, json_to_dataframe, starttime_str_to_seconds, ) TMP = join(dirname(realpath(__file__)), '../tmp/') LCL = join(dirname(realpath(__file__)), '../images/') def", "encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url =", "pd.DataFrame(j['records']) if not d.empty: d['date'] = d['date'].str.slice(stop=-2) d['date'] = pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return", "}, 'yaxis2': 
{ 'title': 'Cumulative Moment (dyn-cm)', 'showgrid': False, 'overlaying': 'y', 'side': 'right'", ")] return { 'data': data, 'layout': { 'margin': { 't': 30 }, 'yaxis':", "= None if region == 'kism': m = Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15, zoom_start=13,", "data, 'layout': { 'margin': { 't': 30 }, 'yaxis': { 'exponentformat': 'none' }", "d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.avgso2, mode='markers', marker=dict(size=6) )] return {", "elif region == 'merz': chs = '15,16' url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\", "= pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.windspeed, name='Wind", "'margin': { 't': 30 }, 'showlegend': False, 'yaxis': { 'title': 'Earthquakes per Hour'", "if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\"", "{ 'data': data, 'layout': { 'margin': { 't': 30 }, 'yaxis': { 'exponentformat':", "= be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st, data): d = json_to_dataframe(st, data) if", "HTTPBasicAuth from .maputils import create_dcircle_marker, create_tcircle_marker from .utils import ( api_request_to_json, json_to_dataframe, starttime_str_to_seconds,", "data) if not d.empty: d.sort_values('date', inplace=True) return d.to_dict('records') def get_hypo_counts(st, data): d =", "{ 'data': data, 'layout': { 'margin': { 't': 30 }, 'showlegend': False, 'yaxis':", "in range(0, max_rows)]] def get_so2emissions(ch, st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = [] d", "td.seconds): return True # Data is old td = now - maxdate if", "data: return True now = datetime.now() olddata = pd.read_json(data) mindate = olddata.date.min() maxdate", 
"'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for col in headers])] + [Tr([ Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'],", "= d.prefMag.apply(lambda x: pow(10.0, 16.0 + ((3.0 * x)/2.0))) d['cmoment'] = d.moment.cumsum() bins", "}, 'yaxis': { 'range': [d.rsam.min() - 20, 2 * d.rsam.mean()] } } }", "d.reset_index(drop=True, inplace=True) return d def get_hypos_map(st, kind, data, region): filename = f'{TMP}hypos{randint(0,9999):04d}.html' d", "return get_hypos(geo, st).to_json() else: return current_data def is_data_needed(st, data): if not data: return", "d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.windspeed, name='Wind Speed', mode='markers', marker=dict(size=6) ), go.Scatter(", "not data: return True now = datetime.now() olddata = pd.read_json(data) mindate = olddata.date.min()", "encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st, data): d = json_to_dataframe(st, data)", "traces.append({ 'x': d.index, 'y': d.east, 'name': 'East', 'mode': 'markers', 'marker': dict( size=4 )", "td = now - mindate # Requested more than is currently stored? 
seconds", "'y': d.cmoment, 'name': 'Moment', 'yaxis': 'y2' })] return { 'data': data, 'layout': {", "max_zoom=15, zoom_start=13, tiles='Stamen Terrain') elif region == 'lerz': m = Map(location=[19.43, -154.88], min_zoom=11,", "} } def get_rtnet(ch, st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces =", "td = now - maxdate if (td.seconds / 60) > 10: return True", "-154.88], min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen Terrain') if kind == 'T': mid = d.date.min()", "{ 'data': data, 'layout': { 'margin': { 't': 30 }, 'xaxis': { 'range':", "data): d = json_to_dataframe(st, data) if not d.empty: d.sort_values('date', inplace=True) return d.to_dict('records') def", "'18,20' elif region == 'merz': chs = '15,16' url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt'", "'name': 'Moment', 'yaxis': 'y2' })] return { 'data': data, 'layout': { 'margin': {", "Moment (dyn-cm)', 'showgrid': False, 'overlaying': 'y', 'side': 'right' } } } def get_spectrogram(src):", "= f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\", "mode='markers', marker=dict(size=10) )] return { 'data': data, 'layout': { 'margin': { 't': 30", "j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date',", "f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ 
f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read())", "st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records']) if not d.empty: d['date'] = d['date'].str.slice(stop=-2)", "'exponentformat': 'none' } } } def get_nps_wind(ch, st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j =", "now - timedelta(minutes=now.minute % 10, seconds=now.second, microseconds=now.microsecond) if 'ipensive' in src: t =", "be from dash_html_components import Th, Tr, Td, A from datetime import datetime, timedelta", "}) return { 'data': traces, 'layout': { 'margin': { 't': 30 } }", "d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.windspeed, name='Wind Speed', mode='markers', marker=dict(size=6) ),", "f\"tangential {j['tangential_azimuth']:.1f}\" }) return { 'data': traces, 'layout': { 'margin': { 't': 30", "from os.path import join, dirname, realpath from random import randint from requests.auth import", ".utils import ( api_request_to_json, json_to_dataframe, starttime_str_to_seconds, ) TMP = join(dirname(realpath(__file__)), '../tmp/') LCL =", "dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.up, 'name': 'Up', 'mode': 'markers',", "d.index, 'y': d.north, 'name': 'North', 'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({", "d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\" }) traces.append({ 'x': d.index, 'y': d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\"", "'showlegend': False, 'yaxis': { 'title': 'Earthquakes per Hour' }, 'yaxis2': { 'title': 'Cumulative", "go import requests from base64 import b64encode as be from dash_html_components import Th,", "mode='markers', marker=dict(size=4) )] return { 'data': data, 'layout': { 'margin': { 't': 30", "bins.depth, 'name': 'Count' }), go.Scatter( { 'x': d.date, 'y': d.cmoment, 
'name': 'Moment', 'yaxis':", "filename def get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url def get_logs(max_rows=20): p =", "'margin': { 't': 30 } } } def get_and_store_hypos(geo, st, current_data): if is_data_needed(st,", "'range': [d.index.min(), d.index.max()] }, 'yaxis': { 'range': [d.rsam.min() - 20, 2 * d.rsam.mean()]", "* d.rsam.mean()] } } } def get_tilt(ch, st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d =", "traces, 'layout': { 'margin': { 't': 30 } } } def get_and_store_hypos(geo, st,", "axis=1) m.save(filename) return open(filename, 'r').read() def get_hypos_legend(kind): encoded_img = None if kind ==", "'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u =", "% d[i]['user']), Td(children='%s' % d[i]['date']) ]) for i in range(0, max_rows)]] def get_so2emissions(ch,", "t) def get_helicorder(ch): url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\", ") }) traces.append({ 'x': d.index, 'y': d.north, 'name': 'North', 'mode': 'markers', 'marker': dict(", "'range': [d.rsam.min() - 20, 2 * d.rsam.mean()] } } } def get_tilt(ch, st):", "be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u", "d.sort_values('date', inplace=True) d['moment'] = d.prefMag.apply(lambda x: pow(10.0, 16.0 + ((3.0 * x)/2.0))) d['cmoment']", "u = request.authorization.username p = request.authorization.password r = requests.get(url, auth=HTTPBasicAuth(u, p)) 
with open(filename,", "join, dirname, realpath from random import randint from requests.auth import HTTPBasicAuth from .maputils", "} } } def get_rtnet(ch, st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces", "\\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url),", "d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\" }) traces.append({", "timedelta(minutes=now.minute % 10, seconds=now.second, microseconds=now.microsecond) if 'ipensive' in src: t = '%d%s%s-%s%s' %", "30 }, 'xaxis': { 'range': [d.index.min(), d.index.max()] }, 'yaxis': { 'range': [d.rsam.min() -", "'x': d.index, 'y': d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\" }) return { 'data': traces, 'layout':", "max_zoom=15, zoom_start=11, tiles='Stamen Terrain') if kind == 'T': mid = d.date.min() mad =", "= f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username p = request.authorization.password r =", "import plotly.graph_objs as go import requests from base64 import b64encode as be from", "= api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not d.empty: d.set_index('date', inplace=True)", "True now = datetime.now() olddata = pd.read_json(data) mindate = olddata.date.min() maxdate = olddata.date.max()", "# Requested more than is currently stored? 
seconds = starttime_str_to_seconds(st) if seconds >", "y=d.so2, mode='markers', marker=dict(size=10) )] return { 'data': data, 'layout': { 'margin': { 't':", "(now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year, d, t) def get_helicorder(ch): url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20'", "pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return d def get_hypos_map(st, kind, data, region): filename = f'{TMP}hypos{randint(0,9999):04d}.html'", "arg=(m, mid, mad), axis=1) elif kind == 'A': d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename) return", "f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username p = request.authorization.password r = requests.get(url, auth=HTTPBasicAuth(u, p)) with", "pandas as pd import plotly.graph_objs as go import requests from base64 import b64encode", "current_data): return get_hypos(geo, st).to_json() else: return current_data def is_data_needed(st, data): if not data:", "- maxdate if (td.seconds / 60) > 10: return True return False def", "'title': 'Wind Direction (deg)', 'showgrid': False, 'overlaying': 'y', 'side': 'right' } } }", "'data': traces, 'layout': { 'margin': { 't': 30 } } } def get_rtnet(ch,", "'title': 'Windspeed (m/s)' }, 'yaxis2': { 'title': 'Wind Direction (deg)', 'showgrid': False, 'overlaying':", "'margin': { 't': 30 } } } def get_nps_so2(ch, st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}')", "d.north, 'name': 'North', 'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index,", "return { 'data': data, 'layout': { 'margin': { 't': 30 } } }", "}, 'yaxis': { 'exponentformat': 'none' } } } def get_nps_wind(ch, st): url =", "d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.avgso2, mode='markers', marker=dict(size=6) )] return { 'data':", "as go import requests from base64 import b64encode as be from dash_html_components 
import", "import HTTPBasicAuth from .maputils import create_dcircle_marker, create_tcircle_marker from .utils import ( api_request_to_json, json_to_dataframe,", "get_logs(max_rows=20): p = api_request_to_json('logs')['posts'] headers = ['Post', 'Author', 'Date'] d = sorted(p, key=itemgetter('date'),", "for i in range(0, max_rows)]] def get_so2emissions(ch, st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data =", "'15,16' url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\", "= [go.Scatter( x=d.index, y=d.windspeed, name='Wind Speed', mode='markers', marker=dict(size=6) ), go.Scatter( x=d.index, y=d.winddir, name='Wind", "children='%s' % d[i]['subject'], target='_blank')), Td(children='%s' % d[i]['user']), Td(children='%s' % d[i]['date']) ]) for i", "'right' } } } def get_spectrogram(src): now = datetime.utcnow() d = now.timetuple().tm_yday tm", "{ 'margin': { 't': 30 } } } def get_rtnet(ch, st): j =", "'t': 30 } } } def get_and_store_hypos(geo, st, current_data): if is_data_needed(st, current_data): return", "args=(m,), axis=1) m.save(filename) return open(filename, 'r').read() def get_hypos_legend(kind): encoded_img = None if kind", "kind == 'T': encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st, data): d", "(now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t = '%d%s-%s%s' % (now.year, str(d).zfill(3), str(tm.hour).zfill(2),", "f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ 
f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\"", "'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st, data): d = json_to_dataframe(st, data) if not d.empty:", "'xaxis': { 'range': [d.index.min(), d.index.max()] }, 'yaxis': { 'range': [d.rsam.min() - 20, 2", "str(tm.minute).zfill(2)) return src.format(now.year, d, t) def get_helicorder(ch): url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\", "= '%d%s%s-%s%s' % (now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t = '%d%s-%s%s' %", "dirname, realpath from random import randint from requests.auth import HTTPBasicAuth from .maputils import", "def get_nps_wind(ch, st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = api_request_to_json(url) data = [] d", "is old td = now - maxdate if (td.seconds / 60) > 10:", "sorted(p, key=itemgetter('date'), reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for col in headers])] +", "Td, A from datetime import datetime, timedelta from flask import request from folium", "'y': d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\" }) traces.append({ 'x': d.index, 'y': d['tangential'], 'name': f\"tangential", "{ 'data': traces, 'layout': { 'margin': { 't': 30 } } } def", "'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.north, 'name': 'North',", "= now - maxdate if (td.seconds / 60) > 10: return True return", "f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username p = request.authorization.password r = requests.get(url,", "'North', 'mode': 
'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y': d.up,", "\\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg'", "d.date, 'y': d.cmoment, 'name': 'Moment', 'yaxis': 'y2' })] return { 'data': data, 'layout':", "(dyn-cm)', 'showgrid': False, 'overlaying': 'y', 'side': 'right' } } } def get_spectrogram(src): now", "mindate # Requested more than is currently stored? seconds = starttime_str_to_seconds(st) if seconds", "def get_logs(max_rows=20): p = api_request_to_json('logs')['posts'] headers = ['Post', 'Author', 'Date'] d = sorted(p,", "os.path import join, dirname, realpath from random import randint from requests.auth import HTTPBasicAuth", "f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false'", "import create_dcircle_marker, create_tcircle_marker from .utils import ( api_request_to_json, json_to_dataframe, starttime_str_to_seconds, ) TMP =", "i in range(0, max_rows)]] def get_so2emissions(ch, st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = []", "d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.so2, mode='markers', marker=dict(size=10) )] return {", "'t': 30 }, 'xaxis': { 'range': [d.index.min(), d.index.max()] }, 'yaxis': { 'range': [d.rsam.min()", "return d def get_hypos_map(st, kind, data, region): filename = f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st,", "tm = now - 
timedelta(minutes=now.minute % 10, seconds=now.second, microseconds=now.microsecond) if 'ipensive' in src:", "{ 'exponentformat': 'none' } } } def get_nps_wind(ch, st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j", "d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\" }) return { 'data': traces, 'layout': { 'margin': {", "get_hypo_counts(st, data): d = json_to_dataframe(st, data) data = [] if not d.empty: d.sort_values('date',", "'Cumulative Moment (dyn-cm)', 'showgrid': False, 'overlaying': 'y', 'side': 'right' } } } def", "pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.so2, mode='markers', marker=dict(size=10)", "name='Wind Speed', mode='markers', marker=dict(size=6) ), go.Scatter( x=d.index, y=d.winddir, name='Wind Dir', yaxis='y2', mode='markers', marker=dict(size=6)", "def get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username p =", "json_to_dataframe, starttime_str_to_seconds, ) TMP = join(dirname(realpath(__file__)), '../tmp/') LCL = join(dirname(realpath(__file__)), '../images/') def get_rsam(ch,", "'y2' })] return { 'data': data, 'layout': { 'margin': { 't': 30 },", "[go.Bar( { 'x': bins.index, 'y': bins.depth, 'name': 'Count' }), go.Scatter( { 'x': d.date,", "'wb') as f: f.write(r.content) return filename def get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return", "Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'], target='_blank')), Td(children='%s' % d[i]['user']), Td(children='%s' % d[i]['date']) ]) for", "'y': d.north, 'name': 'North', 'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({ 'x':", "now - mindate # Requested more than is currently stored? 
seconds = starttime_str_to_seconds(st)", "filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username p = request.authorization.password r", "traces, 'layout': { 'margin': { 't': 30 } } } def get_rtnet(ch, st):", "False, 'overlaying': 'y', 'side': 'right' } } } def get_spectrogram(src): now = datetime.utcnow()", "- mindate # Requested more than is currently stored? seconds = starttime_str_to_seconds(st) if", "86400 + td.seconds): return True # Data is old td = now -", "data, 'layout': { 'margin': { 't': 30 }, 'showlegend': False, 'yaxis': { 'title':", "} def get_nps_so2(ch, st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch])", "r = requests.get(url, auth=HTTPBasicAuth(u, p)) with open(filename, 'wb') as f: f.write(r.content) return filename", "{ 'margin': { 't': 30 }, 'xaxis': { 'range': [d.index.min(), d.index.max()] }, 'yaxis':", "j = api_request_to_json(url) data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date',", "d['moment'] = d.prefMag.apply(lambda x: pow(10.0, 16.0 + ((3.0 * x)/2.0))) d['cmoment'] = d.moment.cumsum()", "% 10, seconds=now.second, microseconds=now.microsecond) if 'ipensive' in src: t = '%d%s%s-%s%s' % (now.year,", "str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year, d, t) def get_helicorder(ch): url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\", "go.Scatter( x=d.index, y=d.winddir, name='Wind Dir', yaxis='y2', mode='markers', marker=dict(size=6) )] return { 'data': data,", "'Moment', 'yaxis': 'y2' })] return { 'data': data, 'layout': { 'margin': { 't':", "d = sorted(p, key=itemgetter('date'), reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for col in", "}) traces.append({ 'x': d.index, 'y': d.up, 'name': 'Up', 'mode': 'markers', 'marker': dict( size=4", 
"x=d.index, y=d.avgso2, mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout': { 'margin': {", "data = [go.Scatter( x=d.index, y=d.rsam, mode='markers', marker=dict(size=4) )] return { 'data': data, 'layout':", "from datetime import datetime, timedelta from flask import request from folium import Map", "= [] if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d['radial'], 'name':", "olddata.date.min() maxdate = olddata.date.max() td = now - mindate # Requested more than", "[] if not d.empty: d.sort_values('date', inplace=True) d['moment'] = d.prefMag.apply(lambda x: pow(10.0, 16.0 +", "'t': 30 }, 'yaxis': { 'exponentformat': 'none' } } } def get_nps_wind(ch, st):", "return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region): chs = '' if region == 'kism': chs =", "'merz': chs = '15,16' url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0='", "= pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.avgso2, mode='markers',", "f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read())", "def get_helicorder(ch): url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false'", "zoom_start=13, tiles='Stamen Terrain') elif region == 'lerz': m = 
Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15,", "st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty:", "request from folium import Map from operator import itemgetter from os.path import join,", "microseconds=now.microsecond) if 'ipensive' in src: t = '%d%s%s-%s%s' % (now.year, str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2),", "plotly.graph_objs as go import requests from base64 import b64encode as be from dash_html_components", "f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F'", "return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username", "d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.rsam, mode='markers', marker=dict(size=4) )] return {", "chs = '' if region == 'kism': chs = '18,20' elif region ==", "encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind == 'T': encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read()) return", "'rb').read()) elif kind == 'T': encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st,", "d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.rsam,", "return src.format(now.year, d, t) def get_helicorder(ch): url = 
f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N'", "headers])] + [Tr([ Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'], target='_blank')), Td(children='%s' % d[i]['user']), Td(children='%s' %", "get_hypos_table(st, data): d = json_to_dataframe(st, data) if not d.empty: d.sort_values('date', inplace=True) return d.to_dict('records')", "def get_nps_so2(ch, st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if", "TMP = join(dirname(realpath(__file__)), '../tmp/') LCL = join(dirname(realpath(__file__)), '../images/') def get_rsam(ch, st): j =", "'mode': 'markers', 'marker': dict( size=4 ) }) return { 'data': traces, 'layout': {", "}), go.Scatter( { 'x': d.date, 'y': d.cmoment, 'name': 'Moment', 'yaxis': 'y2' })] return", "return { 'data': traces, 'layout': { 'margin': { 't': 30 } } }", "= [go.Bar( { 'x': bins.index, 'y': bins.depth, 'name': 'Count' }), go.Scatter( { 'x':", "if not data: return True now = datetime.now() olddata = pd.read_json(data) mindate =", "return open(filename, 'r').read() def get_hypos_legend(kind): encoded_img = None if kind == 'A': encoded_img", "d.cmoment, 'name': 'Moment', 'yaxis': 'y2' })] return { 'data': data, 'layout': { 'margin':", "d = pd.DataFrame(j['records'][ch]) traces = [] if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x':", "'title': 'Earthquakes per Hour' }, 'yaxis2': { 'title': 'Cumulative Moment (dyn-cm)', 'showgrid': False,", "= request.authorization.password r = requests.get(url, auth=HTTPBasicAuth(u, p)) with open(filename, 'wb') as f: f.write(r.content)", "= Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen Terrain') elif region == 'lerz': m", "request.authorization.password r = requests.get(url, auth=HTTPBasicAuth(u, p)) with open(filename, 'wb') as f: 
f.write(r.content) return", "def get_ash3d_img(): url = ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url def get_logs(max_rows=20): p = api_request_to_json('logs')['posts']", "[go.Scatter( x=d.index, y=d.rsam, mode='markers', marker=dict(size=4) )] return { 'data': data, 'layout': { 'margin':", "= datetime.now() olddata = pd.read_json(data) mindate = olddata.date.min() maxdate = olddata.date.max() td =", "from flask import request from folium import Map from operator import itemgetter from", "dash_html_components import Th, Tr, Td, A from datetime import datetime, timedelta from flask", "= d.groupby(pd.Grouper(freq='60min', key='date')).count() data = [go.Bar( { 'x': bins.index, 'y': bins.depth, 'name': 'Count'", "16.0 + ((3.0 * x)/2.0))) d['cmoment'] = d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min', key='date')).count() data", "import pandas as pd import plotly.graph_objs as go import requests from base64 import", "'layout': { 'margin': { 't': 30 } } } def get_and_store_hypos(geo, st, current_data):", "col in headers])] + [Tr([ Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'], target='_blank')), Td(children='%s' % d[i]['user']),", "d.date.max() d.apply(create_tcircle_marker, arg=(m, mid, mad), axis=1) elif kind == 'A': d.apply(create_dcircle_marker, args=(m,), axis=1)", "'name': f\"tangential {j['tangential_azimuth']:.1f}\" }) return { 'data': traces, 'layout': { 'margin': { 't':", "'A': d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename) return open(filename, 'r').read() def get_hypos_legend(kind): encoded_img = None", "= be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind == 'T': encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\"", "= [] if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d.east, 'name':", "= [go.Scatter( x=d.index, y=d.avgso2, mode='markers', 
marker=dict(size=6) )] return { 'data': data, 'layout': {", ") TMP = join(dirname(realpath(__file__)), '../tmp/') LCL = join(dirname(realpath(__file__)), '../images/') def get_rsam(ch, st): j", "'data': traces, 'layout': { 'margin': { 't': 30 } } } def get_and_store_hypos(geo,", "f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0=' \\ f'&dmo_arithmetic.0=None&dmo_arithmetic_value.0=&dmo_db.0=0' \\ f'&debias_period.0=&radial.0=T&tangential.0=T&xTilt.0=F&yTilt.0=F' \\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img =", "= [go.Scatter( x=d.index, y=d.so2, mode='markers', marker=dict(size=10) )] return { 'data': data, 'layout': {", "), go.Scatter( x=d.index, y=d.winddir, name='Wind Dir', yaxis='y2', mode='markers', marker=dict(size=6) )] return { 'data':", "import requests from base64 import b64encode as be from dash_html_components import Th, Tr,", "if seconds > (td.days * 86400 + td.seconds): return True # Data is", "if not d.empty: d.sort_values('date', inplace=True) return d.to_dict('records') def get_hypo_counts(st, data): d = json_to_dataframe(st,", "p = request.authorization.password r = requests.get(url, auth=HTTPBasicAuth(u, p)) with open(filename, 'wb') as f:", "inplace=True) data = [go.Scatter( x=d.index, y=d.rsam, mode='markers', marker=dict(size=4) )] return { 'data': data,", "str(tm.minute).zfill(2)) else: t = '%d%s-%s%s' % (now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year, d,", "x=d.index, y=d.rsam, mode='markers', marker=dict(size=4) )] return { 'data': data, 'layout': { 'margin': {", "pow(10.0, 16.0 + ((3.0 * x)/2.0))) d['cmoment'] = d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min', key='date')).count()", "== 'kism': m = Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen Terrain') elif region", "30 }, 
'showlegend': False, 'yaxis': { 'title': 'Earthquakes per Hour' }, 'yaxis2': {", "old td = now - maxdate if (td.seconds / 60) > 10: return", "}) traces.append({ 'x': d.index, 'y': d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\" }) return { 'data':", "x)/2.0))) d['cmoment'] = d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min', key='date')).count() data = [go.Bar( { 'x':", "'Date'] d = sorted(p, key=itemgetter('date'), reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for col", "= now - timedelta(minutes=now.minute % 10, seconds=now.second, microseconds=now.microsecond) if 'ipensive' in src: t", "is_data_needed(st, current_data): return get_hypos(geo, st).to_json() else: return current_data def is_data_needed(st, data): if not", "marker=dict(size=4) )] return { 'data': data, 'layout': { 'margin': { 't': 30 },", "'showgrid': False, 'overlaying': 'y', 'side': 'right' } } } def get_spectrogram(src): now =", "Td(children='%s' % d[i]['date']) ]) for i in range(0, max_rows)]] def get_so2emissions(ch, st): j", "import itemgetter from os.path import join, dirname, realpath from random import randint from", "% d[i]['date']) ]) for i in range(0, max_rows)]] def get_so2emissions(ch, st): j =", "y=d.winddir, name='Wind Dir', yaxis='y2', mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout': {", "{ 'x': bins.index, 'y': bins.depth, 'name': 'Count' }), go.Scatter( { 'x': d.date, 'y':", "( api_request_to_json, json_to_dataframe, starttime_str_to_seconds, ) TMP = join(dirname(realpath(__file__)), '../tmp/') LCL = join(dirname(realpath(__file__)), '../images/')", "f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ 
f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0=' \\ f'&despike.0=F&detrend.0=F&dmo_fl.0=0&filter_arg3.0='", "request.authorization.username p = request.authorization.password r = requests.get(url, auth=HTTPBasicAuth(u, p)) with open(filename, 'wb') as", "from folium import Map from operator import itemgetter from os.path import join, dirname,", "[] if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d.east, 'name': 'East',", "maxdate if (td.seconds / 60) > 10: return True return False def get_hypos(geo,", "not d.empty: d.sort_values('date', inplace=True) d['moment'] = d.prefMag.apply(lambda x: pow(10.0, 16.0 + ((3.0 *", "\\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\ f'&chNames.0={ch}&dataTypes.0=275.000000&tc.0=15&barMult.0=3' \\ f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def", "True # Data is old td = now - maxdate if (td.seconds /", "[Tr([ Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'], target='_blank')), Td(children='%s' % d[i]['user']), Td(children='%s' % d[i]['date']) ])", "data, 'layout': { 'margin': { 't': 30 }, 'yaxis': { 'title': 'Windspeed (m/s)'", "return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_hypos_table(st, data): d = json_to_dataframe(st, data) if not d.empty: d.sort_values('date',", "= '%d%s-%s%s' % (now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year, d, t) def get_helicorder(ch):", "'Windspeed (m/s)' }, 'yaxis2': { 'title': 'Wind Direction (deg)', 'showgrid': False, 'overlaying': 'y',", "def get_hypos(geo, st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d = pd.DataFrame(j['records']) if not d.empty: d['date']", "filename = f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st, data) m = None if region ==", "size=4 ) }) return { 'data': traces, 
'layout': { 'margin': { 't': 30", "d['date'] = d['date'].str.slice(stop=-2) d['date'] = pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return d def get_hypos_map(st, kind,", "st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not d.empty:", "create_tcircle_marker from .utils import ( api_request_to_json, json_to_dataframe, starttime_str_to_seconds, ) TMP = join(dirname(realpath(__file__)), '../tmp/')", "encoded_img = None if kind == 'A': encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind", "x=d.index, y=d.so2, mode='markers', marker=dict(size=10) )] return { 'data': data, 'layout': { 'margin': {", "marker=dict(size=10) )] return { 'data': data, 'layout': { 'margin': { 't': 30 }", "'y': bins.depth, 'name': 'Count' }), go.Scatter( { 'x': d.date, 'y': d.cmoment, 'name': 'Moment',", "'t': 30 } } } def get_rtnet(ch, st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d =", "pd import plotly.graph_objs as go import requests from base64 import b64encode as be", "def get_hypo_counts(st, data): d = json_to_dataframe(st, data) data = [] if not d.empty:", "'y': d['tangential'], 'name': f\"tangential {j['tangential_azimuth']:.1f}\" }) return { 'data': traces, 'layout': { 'margin':", "'x': d.index, 'y': d.up, 'name': 'Up', 'mode': 'markers', 'marker': dict( size=4 ) })", "be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region): chs = '' if region == 'kism':", "region == 'kism': m = Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen Terrain') elif", "'yaxis2': { 'title': 'Cumulative Moment (dyn-cm)', 'showgrid': False, 'overlaying': 'y', 'side': 'right' }", "10: return True return False def get_hypos(geo, st): j = api_request_to_json(f'hypocenter?geo={geo}&starttime={st}') d =", "y=d.windspeed, name='Wind Speed', mode='markers', marker=dict(size=6) ), 
go.Scatter( x=d.index, y=d.winddir, name='Wind Dir', yaxis='y2', mode='markers',", "= None if kind == 'A': encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind ==", "pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.rsam, mode='markers', marker=dict(size=4)", "'yaxis': 'y2' })] return { 'data': data, 'layout': { 'margin': { 't': 30", "\\ f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename", "as pd import plotly.graph_objs as go import requests from base64 import b64encode as", "return { 'data': data, 'layout': { 'margin': { 't': 30 }, 'xaxis': {", "headers = ['Post', 'Author', 'Date'] d = sorted(p, key=itemgetter('date'), reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}'", "[d.index.min(), d.index.max()] }, 'yaxis': { 'range': [d.rsam.min() - 20, 2 * d.rsam.mean()] }", "join(dirname(realpath(__file__)), '../tmp/') LCL = join(dirname(realpath(__file__)), '../images/') def get_rsam(ch, st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data", "api_request_to_json('logs')['posts'] headers = ['Post', 'Author', 'Date'] d = sorted(p, key=itemgetter('date'), reverse=True) link =", "flask import request from folium import Map from operator import itemgetter from os.path", "'data': data, 'layout': { 'margin': { 't': 30 }, 'yaxis': { 'exponentformat': 'none'", "get_hypos_legend(kind): encoded_img = None if kind == 'A': encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read()) elif", "inplace=True) data = [go.Scatter( x=d.index, y=d.so2, mode='markers', marker=dict(size=10) )] return { 'data': data,", "'A': encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind == 'T': encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read())", 
"d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d.east, 'name': 'East', 'mode': 'markers', 'marker':", "inplace=True) return d.to_dict('records') def get_hypo_counts(st, data): d = json_to_dataframe(st, data) data = []", "seconds > (td.days * 86400 + td.seconds): return True # Data is old", "})] return { 'data': data, 'layout': { 'margin': { 't': 30 }, 'showlegend':", "30 }, 'yaxis': { 'title': 'Windspeed (m/s)' }, 'yaxis2': { 'title': 'Wind Direction", "d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y': d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\" }) traces.append({ 'x':", "+ [Tr([ Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'], target='_blank')), Td(children='%s' % d[i]['user']), Td(children='%s' % d[i]['date'])", "{ 'title': 'Earthquakes per Hour' }, 'yaxis2': { 'title': 'Cumulative Moment (dyn-cm)', 'showgrid':", "requests from base64 import b64encode as be from dash_html_components import Th, Tr, Td,", "d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename) return open(filename, 'r').read() def get_hypos_legend(kind): encoded_img = None if", "data): if not data: return True now = datetime.now() olddata = pd.read_json(data) mindate", "size=4 ) }) traces.append({ 'x': d.index, 'y': d.up, 'name': 'Up', 'mode': 'markers', 'marker':", "d['cmoment'] = d.moment.cumsum() bins = d.groupby(pd.Grouper(freq='60min', key='date')).count() data = [go.Bar( { 'x': bins.index,", "get_nps_so2(ch, st): j = api_request_to_json(f'npsadvisory?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not", "url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0=' \\ 
f'&ysRMax.0=&despike_period.0=&filter_arg1.0=&filter_arg2.0='", "'name': 'East', 'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y':", "src.format(now.year, d, t) def get_helicorder(ch): url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1' \\ f'&src.0=hvo_seismic_winston_helicorders&st.0=-28800000&et.0=N' \\", "} } } def get_tilt(ch, st): j = api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces", "== 'A': encoded_img = be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind == 'T': encoded_img = be(open(f'{LCL}tlegend.png',", "datetime, timedelta from flask import request from folium import Map from operator import", "api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data", "if (td.seconds / 60) > 10: return True return False def get_hypos(geo, st):", "def get_so2emissions(ch, st): j = api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if", "def get_rsam(ch, st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if", "per Hour' }, 'yaxis2': { 'title': 'Cumulative Moment (dyn-cm)', 'showgrid': False, 'overlaying': 'y',", "random import randint from requests.auth import HTTPBasicAuth from .maputils import create_dcircle_marker, create_tcircle_marker from", "= be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region): chs = '' if region ==", "= json_to_dataframe(st, data) if not d.empty: d.sort_values('date', inplace=True) return d.to_dict('records') def get_hypo_counts(st, data):", "d.sort_values('date', inplace=True) return d.to_dict('records') def get_hypo_counts(st, data): d = json_to_dataframe(st, data) data =", "d.empty: d.sort_values('date', 
inplace=True) return d.to_dict('records') def get_hypo_counts(st, data): d = json_to_dataframe(st, data) data", "elif region == 'lerz': m = Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen Terrain')", "30 }, 'yaxis': { 'exponentformat': 'none' } } } def get_nps_wind(ch, st): url", "json_to_dataframe(st, data) data = [] if not d.empty: d.sort_values('date', inplace=True) d['moment'] = d.prefMag.apply(lambda", "d['date'].str.slice(stop=-2) d['date'] = pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return d def get_hypos_map(st, kind, data, region):", "= ('https://volcanoes.usgs.gov/vsc/captures/ash3d/' '332010_1008443_D_deposit.gif') return url def get_logs(max_rows=20): p = api_request_to_json('logs')['posts'] headers = ['Post',", "= '15,16' url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=1740&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=240&mh.0=900&chCnt.0=7&src.0=hvo_def_tilt' \\ f'&st.0=-28800000&et.0=N&lg.0=true&ch.0={chs}' \\ f'&dataTypes.0=NaN&plotType.0=tv&rk.0=1&ds.0=None&dsInt.0=&sdt.0=' \\ f'&az.0=n&azval.0=&linetype.0=l&ysLMin.0=&ysLMax.0=&ysRMin.0='", "str(tm.hour).zfill(2), str(tm.minute).zfill(2)) return src.format(now.year, d, t) def get_helicorder(ch): url = f'a=plot&o=png&tz=Pacific/Honolulu&w=900&h=636&n=1&x.0=75&y.0=20' \\ f'&w.0=750&h.0=576&mh.0=900&chCnt.0=1'", "not d.empty: d['date'] = d['date'].str.slice(stop=-2) d['date'] = pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return d def", "def get_tiltv(region): chs = '' if region == 'kism': chs = '18,20' elif", "'layout': { 'margin': { 't': 30 }, 'showlegend': False, 'yaxis': { 'title': 'Earthquakes", "Map(location=[19.43, -154.88], min_zoom=11, max_zoom=15, zoom_start=11, tiles='Stamen Terrain') if kind == 'T': mid =", "None if region == 'kism': m = Map(location=[19.41, -155.27], min_zoom=12, max_zoom=15, zoom_start=13, tiles='Stamen", "go.Scatter( { 'x': d.date, 'y': d.cmoment, 'name': 'Moment', 'yaxis': 'y2' })] return {", 
"'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_tiltv(region): chs = '' if region == 'kism': chs", "api_request_to_json(f'tilt?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not d.empty: d.set_index('date', inplace=True) traces.append({", "region == 'kism': chs = '18,20' elif region == 'merz': chs = '15,16'", "d.east, 'name': 'East', 'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index,", "key=itemgetter('date'), reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for col in headers])] + [Tr([", "import b64encode as be from dash_html_components import Th, Tr, Td, A from datetime", "= be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename = f'{TMP}valve{randint(0,9999):04d}.jpg' url = f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}'", "-*- import pandas as pd import plotly.graph_objs as go import requests from base64", "mindate = olddata.date.min() maxdate = olddata.date.max() td = now - mindate # Requested", "olddata = pd.read_json(data) mindate = olddata.date.min() maxdate = olddata.date.max() td = now -", "{ 'data': data, 'layout': { 'margin': { 't': 30 }, 'yaxis': { 'title':", "= datetime.utcnow() d = now.timetuple().tm_yday tm = now - timedelta(minutes=now.minute % 10, seconds=now.second,", "= olddata.date.max() td = now - mindate # Requested more than is currently", "[go.Scatter( x=d.index, y=d.so2, mode='markers', marker=dict(size=10) )] return { 'data': data, 'layout': { 'margin':", "currently stored? 
seconds = starttime_str_to_seconds(st) if seconds > (td.days * 86400 + td.seconds):", "region): filename = f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st, data) m = None if region", "d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.avgso2,", "= (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = api_request_to_json(url) data = [] d = pd.DataFrame(j['records'][ch]) if not", "def get_spectrogram(src): now = datetime.utcnow() d = now.timetuple().tm_yday tm = now - timedelta(minutes=now.minute", "= pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.rsam, mode='markers',", "= pd.to_datetime(d['date']) d.reset_index(drop=True, inplace=True) return d def get_hypos_map(st, kind, data, region): filename =", "inplace=True) traces.append({ 'x': d.index, 'y': d['radial'], 'name': f\"radial {j['used_azimuth']:.1f}\" }) traces.append({ 'x': d.index,", "'name': 'North', 'mode': 'markers', 'marker': dict( size=4 ) }) traces.append({ 'x': d.index, 'y':", "= f'https://hvovalve.wr.usgs.gov/valve3/valve3.jsp?{itm}' u = request.authorization.username p = request.authorization.password r = requests.get(url, auth=HTTPBasicAuth(u, p))", "-*- coding: utf-8 -*- import pandas as pd import plotly.graph_objs as go import", "mid = d.date.min() mad = d.date.max() d.apply(create_tcircle_marker, arg=(m, mid, mad), axis=1) elif kind", "pd.DataFrame(j['records'][ch]) traces = [] if not d.empty: d.set_index('date', inplace=True) traces.append({ 'x': d.index, 'y':", "d def get_hypos_map(st, kind, data, region): filename = f'{TMP}hypos{randint(0,9999):04d}.html' d = json_to_dataframe(st, data)", "'r').read() def get_hypos_legend(kind): encoded_img = None if kind == 'A': encoded_img = be(open(f'{LCL}dlegend.png',", "requests.auth import HTTPBasicAuth from .maputils import create_dcircle_marker, create_tcircle_marker from .utils 
import ( api_request_to_json,", "data = [go.Scatter( x=d.index, y=d.avgso2, mode='markers', marker=dict(size=6) )] return { 'data': data, 'layout':", "def get_rtnet(ch, st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if", "d = json_to_dataframe(st, data) data = [] if not d.empty: d.sort_values('date', inplace=True) d['moment']", "'x': bins.index, 'y': bins.depth, 'name': 'Count' }), go.Scatter( { 'x': d.date, 'y': d.cmoment,", "[] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index,", "inplace=True) traces.append({ 'x': d.index, 'y': d.east, 'name': 'East', 'mode': 'markers', 'marker': dict( size=4", "'overlaying': 'y', 'side': 'right' } } } def get_spectrogram(src): now = datetime.utcnow() d", "get_and_store_hypos(geo, st, current_data): if is_data_needed(st, current_data): return get_hypos(geo, st).to_json() else: return current_data def", "{ 'margin': { 't': 30 } } } def get_nps_so2(ch, st): j =", "'margin': { 't': 30 }, 'yaxis': { 'exponentformat': 'none' } } } def", "get_spectrogram(src): now = datetime.utcnow() d = now.timetuple().tm_yday tm = now - timedelta(minutes=now.minute %", "= api_request_to_json(url) data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True)", "Speed', mode='markers', marker=dict(size=6) ), go.Scatter( x=d.index, y=d.winddir, name='Wind Dir', yaxis='y2', mode='markers', marker=dict(size=6) )]", "{ 'margin': { 't': 30 }, 'yaxis': { 'title': 'Windspeed (m/s)' }, 'yaxis2':", "'yaxis': { 'title': 'Earthquakes per Hour' }, 'yaxis2': { 'title': 'Cumulative Moment (dyn-cm)',", "'yaxis': { 'exponentformat': 'none' } } } def get_nps_wind(ch, st): url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir')", "\\ f'&sc.0=T&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" 
def get_tiltv(region): chs = ''", "not d.empty: d.set_index('date', inplace=True) data = [go.Scatter( x=d.index, y=d.windspeed, name='Wind Speed', mode='markers', marker=dict(size=6)", "[go.Scatter( x=d.index, y=d.windspeed, name='Wind Speed', mode='markers', marker=dict(size=6) ), go.Scatter( x=d.index, y=d.winddir, name='Wind Dir',", "api_request_to_json(f'so2emissions?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not d.empty: d.set_index('date', inplace=True) data", "be(open(f'{LCL}dlegend.png', 'rb').read()) elif kind == 'T': encoded_img = be(open(f'{LCL}tlegend.png', 'rb').read()) return f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def", "= '' if region == 'kism': chs = '18,20' elif region == 'merz':", "= json_to_dataframe(st, data) m = None if region == 'kism': m = Map(location=[19.41,", "['Post', 'Author', 'Date'] d = sorted(p, key=itemgetter('date'), reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col)", "get_rtnet(ch, st): j = api_request_to_json(f'rtnet?channel={ch}&starttime={st}') d = pd.DataFrame(j['records'][ch]) traces = [] if not", "str(now.month).zfill(2), str(now.day).zfill(2), str(tm.hour).zfill(2), str(tm.minute).zfill(2)) else: t = '%d%s-%s%s' % (now.year, str(d).zfill(3), str(tm.hour).zfill(2), str(tm.minute).zfill(2))", "for col in headers])] + [Tr([ Td(A(href=link.format(d[i]['id']), children='%s' % d[i]['subject'], target='_blank')), Td(children='%s' %", "import datetime, timedelta from flask import request from folium import Map from operator", "30 } } } def get_and_store_hypos(geo, st, current_data): if is_data_needed(st, current_data): return get_hypos(geo,", "'layout': { 'margin': { 't': 30 } } } def get_rtnet(ch, st): j", "f'&magnitude.0=F&azimuth.0=F&holeTemp.0=F&boxTemp.0=F&instVolt.0=F' \\ f'&rainfall.0=F&vs.0=&plotSeparately.0=false' encoded_img = be(open(get_valve_plot(url), 'rb').read()) return 
f\"data:image/jpg;base64,{encoded_img.decode('utf8')}\" def get_valve_plot(itm): filename =", "traces.append({ 'x': d.index, 'y': d.up, 'name': 'Up', 'mode': 'markers', 'marker': dict( size=4 )", "url = (f'npsadvisory?channel={ch}&starttime={st}&series=windspeed,winddir') j = api_request_to_json(url) data = [] d = pd.DataFrame(j['records'][ch]) if", "randint from requests.auth import HTTPBasicAuth from .maputils import create_dcircle_marker, create_tcircle_marker from .utils import", "} } } def get_and_store_hypos(geo, st, current_data): if is_data_needed(st, current_data): return get_hypos(geo, st).to_json()", "mad), axis=1) elif kind == 'A': d.apply(create_dcircle_marker, args=(m,), axis=1) m.save(filename) return open(filename, 'r').read()", "'Author', 'Date'] d = sorted(p, key=itemgetter('date'), reverse=True) link = 'https://hvointernal.wr.usgs.gov/hvo_logs/read?id={}' return [[Tr([Th(col) for", "'layout': { 'margin': { 't': 30 }, 'yaxis': { 'title': 'Windspeed (m/s)' },", "current_data): if is_data_needed(st, current_data): return get_hypos(geo, st).to_json() else: return current_data def is_data_needed(st, data):", "as be from dash_html_components import Th, Tr, Td, A from datetime import datetime,", "'yaxis': { 'range': [d.rsam.min() - 20, 2 * d.rsam.mean()] } } } def", "get_rsam(ch, st): j = api_request_to_json(f'rsam?channel={ch}&starttime={st}') data = [] d = pd.DataFrame(j['records'][ch]) if not" ]
[ "label2id = json.loads(open(\"./label2id.json\").read()) id2label = [k for k, v in label2id.items()] def process_one_example_p(tokenizer,", "(input_ids, input_mask, segment_ids) return feature def load_model(model_folder): # We retrieve our checkpoint fullpath", "restore the graph weights sess_ = tf.Session() saver.restore(sess_, input_checkpoint) # opts = sess_.graph.get_operations()", "zip(\"\".join(data), result): if re.search(\"^[BS]\", t): if start is not None: label = result[index", "1][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_] = [[start, index", "input_mask = [1] * len(input_ids) while len(input_ids) < max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0)", "TensorFlow to control on which device it will load operations clear_devices = True", "labels[label] = {te_: [[start, index - 1]]} # print(labels) return labels def submit(path):", "saver.restore(sess_, input_checkpoint) # opts = sess_.graph.get_operations() # for v in opts: # print(v.name)", "feature = process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature) feed = {input_ids: [feature[0] for feature in", "te_ = text[start:index] # print(te_, labels) labels[label] = {te_: [[start, index - 1]]}", "json.loads(open(\"./label2id.json\").read()) id2label = [k for k, v in label2id.items()] def process_one_example_p(tokenizer, text, max_seq_len=128):", "device it will load operations clear_devices = True tf.reset_default_graph() # We import the", "for feature in features], input_mask: [feature[1] for feature in features], segment_ids: [feature[2] for", "Saver saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices) # We start a session and", "assert len(input_mask) == max_seq_len assert len(segment_ids) == max_seq_len feature = (input_ids, input_mask, segment_ids)", "not None: label = result[index - 1][2:] if labels.get(label): te_ = text[start:index] #", "for i, 
token in enumerate(tokens): ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids =", "max_seq_len=128): textlist = list(text) tokens = [] # labels = [] for i,", "if not line.strip(): continue _ = json.loads(line.strip()) res = predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False))", "process_one_example_p(tokenizer, text, max_seq_len=128): textlist = list(text) tokens = [] # labels = []", "# 句子开始设置CLS 标志 segment_ids.append(0) for i, token in enumerate(tokens): ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]])", "features], keep_prob: 1.0 } [probs] = sess.run([p], feed) result = [] for index,", "- 1][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_] = [[start,", "= {te_: [[start, index - 1]]} # else: # print(start, labels) start =", "# 逐个分成 最大62长度的 text 进行 batch 预测 features = [] for i in", "sess.run([p], feed) result = [] for index, prob in enumerate(probs): for v in", "ntokens.append(\"**NULL**\") assert len(input_ids) == max_seq_len assert len(input_mask) == max_seq_len assert len(segment_ids) == max_seq_len", "for v in prob[1:len(data[index]) + 1]: result.append(id2label[int(v)]) print(result) labels = {} start =", "# print(te_, labels) labels[label] = {te_: [[start, index - 1]]} start = index", "[feature[2] for feature in features], keep_prob: 1.0 } [probs] = sess.run([p], feed) result", "features.append(feature) feed = {input_ids: [feature[0] for feature in features], input_mask: [feature[1] for feature", "label2id.items()] def process_one_example_p(tokenizer, text, max_seq_len=128): textlist = list(text) tokens = [] # labels", "for i, word in enumerate(textlist): token = tokenizer.tokenize(word) # print(token) tokens.extend(token) if len(tokens)", "== max_seq_len feature = (input_ids, input_mask, segment_ids) return feature def load_model(model_folder): # We", "feature = 
(input_ids, input_mask, segment_ids) return feature def load_model(model_folder): # We retrieve our", "i, max_seq_len=64) features.append(feature) feed = {input_ids: [feature[0] for feature in features], input_mask: [feature[1]", "json import tensorflow as tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file = \"./vocab.txt\"", "labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_] = [[start, index - 1]]", "index # print(start) if re.search(\"^O\", t): if start is not None: # print(start)", "json.loads(line.strip()) res = predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if __name__ == \"__main__\":", "2019-12-07 20:51 \"\"\" import os import re import json import tensorflow as tf", "= [] for i, word in enumerate(textlist): token = tokenizer.tokenize(word) # print(token) tokens.extend(token)", "model_folder, repr(e)) # We clear devices to allow TensorFlow to control on which", "= sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape", "句子开始设置CLS 标志 segment_ids.append(0) for i, token in enumerate(tokens): ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\")", "v in prob[1:len(data[index]) + 1]: result.append(id2label[int(v)]) print(result) labels = {} start = None", "batch 预测 features = [] for i in data: feature = process_one_example_p(tokenizer_, i,", "import tensorflow as tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file = \"./vocab.txt\" tokenizer_", "= None index = 0 for w, t in zip(\"\".join(data), result): if re.search(\"^[BS]\",", "= (input_ids, input_mask, segment_ids) return feature def load_model(model_folder): # We retrieve our checkpoint", "in 
enumerate(textlist): token = tokenizer.tokenize(word) # print(token) tokens.extend(token) if len(tokens) >= max_seq_len -", "graph and retrieve a Saver saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices) # We", "# fc/dense/Relu cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data =", "start is not None: # print(start) label = result[index - 1][2:] if labels.get(label):", "i in data: feature = process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature) feed = {input_ids: [feature[0]", "our checkpoint fullpath try: checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint)", "= process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature) feed = {input_ids: [feature[0] for feature in features],", "input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids) == max_seq_len assert len(input_mask) == max_seq_len", "1]: result.append(id2label[int(v)]) print(result) labels = {} start = None index = 0 for", "= tokenizer.convert_tokens_to_ids(ntokens) input_mask = [1] * len(input_ids) while len(input_ids) < max_seq_len: input_ids.append(0) input_mask.append(0)", "[1] * len(input_ids) while len(input_ids) < max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert", "[] label_ids = [] ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志 segment_ids.append(0) for i, token in", "i, token in enumerate(tokens): ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens)", "token = tokenizer.tokenize(word) # print(token) 
tokens.extend(token) if len(tokens) >= max_seq_len - 1: tokens", "len(input_ids) while len(input_ids) < max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids) ==", "[] for line in open(path): if not line.strip(): continue _ = json.loads(line.strip()) res", "= index # print(start) if re.search(\"^O\", t): if start is not None: #", "segment_ids) return feature def load_model(model_folder): # We retrieve our checkpoint fullpath try: checkpoint", "tokens[0:(max_seq_len - 2)] # labels = labels[0:(max_seq_len - 2)] ntokens = [] segment_ids", "ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志 segment_ids.append(0) for i, token in enumerate(tokens): ntokens.append(token) segment_ids.append(0) #", "- 1]]} start = index # print(start) if re.search(\"^O\", t): if start is", "i, word in enumerate(textlist): token = tokenizer.tokenize(word) # print(token) tokens.extend(token) if len(tokens) >=", "index - 1]] else: te_ = text[start:index] # print(te_, labels) labels[label] = {te_:", "segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens) input_mask = [1] * len(input_ids)", "_ = json.loads(line.strip()) res = predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if __name__", "= [text] # 逐个分成 最大62长度的 text 进行 batch 预测 features = [] for", "print(te_, labels) labels[label] = {te_: [[start, index - 1]]} start = index #", "result[index - 1][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_] =", "feature in features], segment_ids: [feature[2] for feature in features], keep_prob: 1.0 } [probs]", "def load_model(model_folder): # We retrieve our checkpoint fullpath try: checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint", "is not None: # print(start) 
label = result[index - 1][2:] if labels.get(label): te_", "fc/dense/Relu cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data = [text]", "result[start][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_] = [[start, index", "if re.search(\"^O\", t): if start is not None: # print(start) label = result[index", "def predict(text): data = [text] # 逐个分成 最大62长度的 text 进行 batch 预测 features", "as e: input_checkpoint = model_folder print(\"[INFO] Model folder\", model_folder, repr(e)) # We clear", "prob[1:len(data[index]) + 1]: result.append(id2label[int(v)]) print(result) labels = {} start = None index =", "else: # print(start, labels) start = None index += 1 if start is", "1.0 } [probs] = sess.run([p], feed) result = [] for index, prob in", "def process_one_example_p(tokenizer, text, max_seq_len=128): textlist = list(text) tokens = [] # labels =", "逐个分成 最大62长度的 text 进行 batch 预测 features = [] for i in data:", "\"\"\" import os import re import json import tensorflow as tf import tokenization", "keep_prob: 1.0 } [probs] = sess.run([p], feed) result = [] for index, prob", "标志 segment_ids.append(0) for i, token in enumerate(tokens): ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0)", "retrieve a Saver saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices) # We start a", "data = [text] # 逐个分成 最大62长度的 text 进行 batch 预测 features = []", "labels = [] for i, word in enumerate(textlist): token = tokenizer.tokenize(word) # print(token)", "on which device it will load operations clear_devices = True tf.reset_default_graph() # We", "max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids) == max_seq_len assert len(input_mask) ==", "= 
sess.run([p], feed) result = [] for index, prob in enumerate(probs): for v", "in enumerate(probs): for v in prob[1:len(data[index]) + 1]: result.append(id2label[int(v)]) print(result) labels = {}", "re.search(\"^[BS]\", t): if start is not None: label = result[index - 1][2:] if", "text[start:index] # print(te_, labels) labels[label] = {te_: [[start, index - 1]]} start =", "\"./ner_bert_base/\" sess = load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids", "k, v in label2id.items()] def process_one_example_p(tokenizer, text, max_seq_len=128): textlist = list(text) tokens =", "in label2id.items()] def process_one_example_p(tokenizer, text, max_seq_len=128): textlist = list(text) tokens = [] #", "continue _ = json.loads(line.strip()) res = predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if", "labels) labels[label][te_] = [[start, index - 1]] else: te_ = text[start:index] # print(te_,", "re.search(\"^O\", t): if start is not None: # print(start) label = result[index -", "# print(start) if re.search(\"^O\", t): if start is not None: # print(start) label", "We import the meta graph and retrieve a Saver saver = tf.train.import_meta_graph(input_checkpoint +", "start = None index = 0 for w, t in zip(\"\".join(data), result): if", "We clear devices to allow TensorFlow to control on which device it will", "print(start, labels) start = None index += 1 if start is not None:", "= json.loads(line.strip()) res = predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if __name__ ==", "= {input_ids: [feature[0] for feature in features], input_mask: [feature[1] for feature in features],", "== max_seq_len assert len(segment_ids) == max_seq_len feature = (input_ids, input_mask, segment_ids) 
return feature", "t in zip(\"\".join(data), result): if re.search(\"^[BS]\", t): if start is not None: label", "index - 1]]} # print(labels) return labels def submit(path): data = [] for", "te_ = text[start:index] # print(te_, labels) labels[label][te_] = [[start, index - 1]] else:", "# We import the meta graph and retrieve a Saver saver = tf.train.import_meta_graph(input_checkpoint", "tokens = tokens[0:(max_seq_len - 2)] # labels = labels[0:(max_seq_len - 2)] ntokens =", "while len(input_ids) < max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids) == max_seq_len", "# print(start, labels) start = None index += 1 if start is not", "label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids) == max_seq_len assert len(input_mask) == max_seq_len assert len(segment_ids) ==", "if re.search(\"^[BS]\", t): if start is not None: label = result[index - 1][2:]", "coding:utf8 \"\"\" @author: <NAME> @time: 2019-12-07 20:51 \"\"\" import os import re import", "input_ids = tokenizer.convert_tokens_to_ids(ntokens) input_mask = [1] * len(input_ids) while len(input_ids) < max_seq_len: input_ids.append(0)", "input_checkpoint:\", input_checkpoint) except Exception as e: input_checkpoint = model_folder print(\"[INFO] Model folder\", model_folder,", "# print(start) label = result[index - 1][2:] if labels.get(label): te_ = text[start:index] #", "import json import tensorflow as tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file =", "vocab_file = \"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read()) id2label = [k for", "segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens) input_mask = [1] * len(input_ids) while len(input_ids) < max_seq_len:", "# labels = labels[0:(max_seq_len - 2)] ntokens = [] segment_ids = [] label_ids", "= 
sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text):", "index += 1 if start is not None: # print(start) label = result[start][2:]", "except Exception as e: input_checkpoint = model_folder print(\"[INFO] Model folder\", model_folder, repr(e)) #", "input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint) except Exception as e: input_checkpoint = model_folder", "start = None index += 1 if start is not None: # print(start)", "len(tokens) >= max_seq_len - 1: tokens = tokens[0:(max_seq_len - 2)] # labels =", "v in opts: # print(v.name) return sess_ model_path = \"./ner_bert_base/\" sess = load_model(model_path)", "text 进行 batch 预测 features = [] for i in data: feature =", "labels = labels[0:(max_seq_len - 2)] ntokens = [] segment_ids = [] label_ids =", "sess_ = tf.Session() saver.restore(sess_, input_checkpoint) # opts = sess_.graph.get_operations() # for v in", "#!/usr/bin/python # coding:utf8 \"\"\" @author: <NAME> @time: 2019-12-07 20:51 \"\"\" import os import", "20:51 \"\"\" import os import re import json import tensorflow as tf import", "print(v.name) return sess_ model_path = \"./ner_bert_base/\" sess = load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask", "[feature[0] for feature in features], input_mask: [feature[1] for feature in features], segment_ids: [feature[2]", "result.append(id2label[int(v)]) print(result) labels = {} start = None index = 0 for w,", "labels = {} start = None index = 0 for w, t in", "if start is not None: # print(start) label = result[index - 1][2:] if", "in data: feature = process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature) feed = {input_ids: [feature[0] for", "t): if start is not None: # print(start) label = result[index - 1][2:]", "not line.strip(): 
continue _ = json.loads(line.strip()) res = predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\",", "max_seq_len - 1: tokens = tokens[0:(max_seq_len - 2)] # labels = labels[0:(max_seq_len -", "import re import json import tensorflow as tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\"", "return feature def load_model(model_folder): # We retrieve our checkpoint fullpath try: checkpoint =", "= load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\")", "- 2)] # labels = labels[0:(max_seq_len - 2)] ntokens = [] segment_ids =", "None index += 1 if start is not None: # print(start) label =", "1]]} start = index # print(start) if re.search(\"^O\", t): if start is not", "- 1]]} # else: # print(start, labels) start = None index += 1", "ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens) input_mask = [1] * len(input_ids) while len(input_ids) <", "for line in open(path): if not line.strip(): continue _ = json.loads(line.strip()) res =", "# We clear devices to allow TensorFlow to control on which device it", "print(result) labels = {} start = None index = 0 for w, t", "1: tokens = tokens[0:(max_seq_len - 2)] # labels = labels[0:(max_seq_len - 2)] ntokens", "opts: # print(v.name) return sess_ model_path = \"./ner_bert_base/\" sess = load_model(model_path) input_ids =", "= [] for i in data: feature = process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature) feed", "import the meta graph and retrieve a Saver saver = tf.train.import_meta_graph(input_checkpoint + '.meta',", "load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids = 
sess.graph.get_tensor_by_name(\"segment_ids:0\") #", "res}, ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if __name__ == \"__main__\": text_ = \"梅塔利斯在乌克兰联赛、杯赛及联盟杯中保持9场不败,状态相当出色;\" res_ =", "retrieve our checkpoint fullpath try: checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\",", "# print(te_, labels) labels[label] = {te_: [[start, index - 1]]} # else: #", "label = result[index - 1][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels)", "labels[0:(max_seq_len - 2)] ntokens = [] segment_ids = [] label_ids = [] ntokens.append(\"[CLS]\")", "# labels = [] for i, word in enumerate(textlist): token = tokenizer.tokenize(word) #", "print(start) label = result[index - 1][2:] if labels.get(label): te_ = text[start:index] # print(te_,", "in enumerate(tokens): ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens) input_mask =", "saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices) # We start a session and restore", "[text] # 逐个分成 最大62长度的 text 进行 batch 预测 features = [] for i", "sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data", "print(start) if re.search(\"^O\", t): if start is not None: # print(start) label =", "text[start:index] # print(te_, labels) labels[label] = {te_: [[start, index - 1]]} # print(labels)", "print(\"[INFO] input_checkpoint:\", input_checkpoint) except Exception as e: input_checkpoint = model_folder print(\"[INFO] Model folder\",", "checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint) except Exception as e: input_checkpoint = 
model_folder print(\"[INFO] Model", "[] for i in data: feature = process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature) feed =", "tokenizer.convert_tokens_to_ids(ntokens) input_mask = [1] * len(input_ids) while len(input_ids) < max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0)", "print(te_, labels) labels[label][te_] = [[start, index - 1]] else: te_ = text[start:index] #", "print(te_, labels) labels[label] = {te_: [[start, index - 1]]} # else: # print(start,", "feature def load_model(model_folder): # We retrieve our checkpoint fullpath try: checkpoint = tf.train.get_checkpoint_state(model_folder)", "a Saver saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices) # We start a session", "in features], keep_prob: 1.0 } [probs] = sess.run([p], feed) result = [] for", "= tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read()) id2label = [k for k, v in label2id.items()]", "if len(tokens) >= max_seq_len - 1: tokens = tokens[0:(max_seq_len - 2)] # labels", "allow TensorFlow to control on which device it will load operations clear_devices =", "is_training segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\")", "tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file = \"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id", "the meta graph and retrieve a Saver saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices)", "import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file = \"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id =", "clear_devices=clear_devices) # We start a session and restore the graph weights sess_ =", 
"enumerate(tokens): ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens) input_mask = [1]", "[[start, index - 1]] else: te_ = text[start:index] # print(te_, labels) labels[label] =", "# print(labels) return labels def submit(path): data = [] for line in open(path):", "to control on which device it will load operations clear_devices = True tf.reset_default_graph()", "to allow TensorFlow to control on which device it will load operations clear_devices", "it will load operations clear_devices = True tf.reset_default_graph() # We import the meta", "max_seq_len assert len(input_mask) == max_seq_len assert len(segment_ids) == max_seq_len feature = (input_ids, input_mask,", "label_ids = [] ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志 segment_ids.append(0) for i, token in enumerate(tokens):", "len(input_mask) == max_seq_len assert len(segment_ids) == max_seq_len feature = (input_ids, input_mask, segment_ids) return", "for v in opts: # print(v.name) return sess_ model_path = \"./ner_bert_base/\" sess =", "= \"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read()) id2label = [k for k,", "tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read()) id2label = [k for k, v in label2id.items()] def", "load_model(model_folder): # We retrieve our checkpoint fullpath try: checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint =", "None: # print(start) label = result[index - 1][2:] if labels.get(label): te_ = text[start:index]", "if start is not None: label = result[index - 1][2:] if labels.get(label): te_", "e: input_checkpoint = model_folder print(\"[INFO] Model folder\", model_folder, repr(e)) # We clear devices", "@author: <NAME> @time: 2019-12-07 20:51 \"\"\" import os import re import json import", "= 
sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data = [text] # 逐个分成 最大62长度的 text 进行 batch", "= None index += 1 if start is not None: # print(start) label", "data: feature = process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature) feed = {input_ids: [feature[0] for feature", "tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read()) id2label = [k for k, v in", "and retrieve a Saver saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices) # We start", "input_checkpoint = model_folder print(\"[INFO] Model folder\", model_folder, repr(e)) # We clear devices to", "# print(start) label = result[start][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels)", "label = result[start][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_] =", "open(path): if not line.strip(): continue _ = json.loads(line.strip()) res = predict(_[\"text\"]) data.append(json.dumps({\"label\": res},", "'.meta', clear_devices=clear_devices) # We start a session and restore the graph weights sess_", "tf.Session() saver.restore(sess_, input_checkpoint) # opts = sess_.graph.get_operations() # for v in opts: #", "token in enumerate(tokens): ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens) input_mask", "feature in features], input_mask: [feature[1] for feature in features], segment_ids: [feature[2] for feature", "segment_ids.append(0) for i, token in enumerate(tokens): ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids", "process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature) feed = {input_ids: [feature[0] for feature in features], input_mask:", "None index = 
0 for w, t in zip(\"\".join(data), result): if re.search(\"^[BS]\", t):", "tokens = [] # labels = [] for i, word in enumerate(textlist): token", "- 1]] else: te_ = text[start:index] # print(te_, labels) labels[label] = {te_: [[start,", "[[start, index - 1]]} start = index # print(start) if re.search(\"^O\", t): if", "the graph weights sess_ = tf.Session() saver.restore(sess_, input_checkpoint) # opts = sess_.graph.get_operations() #", "w, t in zip(\"\".join(data), result): if re.search(\"^[BS]\", t): if start is not None:", "labels[label] = {te_: [[start, index - 1]]} start = index # print(start) if", "= \"./ner_bert_base/\" sess = load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training", "features], segment_ids: [feature[2] for feature in features], keep_prob: 1.0 } [probs] = sess.run([p],", "a session and restore the graph weights sess_ = tf.Session() saver.restore(sess_, input_checkpoint) #", "feed = {input_ids: [feature[0] for feature in features], input_mask: [feature[1] for feature in", "print(labels) return labels def submit(path): data = [] for line in open(path): if", "{te_: [[start, index - 1]]} # else: # print(start, labels) start = None", "= [] label_ids = [] ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志 segment_ids.append(0) for i, token", "{te_: [[start, index - 1]]} start = index # print(start) if re.search(\"^O\", t):", "for i in data: feature = process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature) feed = {input_ids:", "folder\", model_folder, repr(e)) # We clear devices to allow TensorFlow to control on", "[[start, index - 1]]} # else: # print(start, labels) start = None index", "sess_ model_path = \"./ner_bert_base/\" sess = load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\")", "预测 features = [] for i in data: feature = 
process_one_example_p(tokenizer_, i, max_seq_len=64)", "* len(input_ids) while len(input_ids) < max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids)", "= True tf.reset_default_graph() # We import the meta graph and retrieve a Saver", "predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if __name__ == \"__main__\": text_ = \"梅塔利斯在乌克兰联赛、杯赛及联盟杯中保持9场不败,状态相当出色;\"", "tokens.extend(token) if len(tokens) >= max_seq_len - 1: tokens = tokens[0:(max_seq_len - 2)] #", "try: checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint) except Exception as", "control on which device it will load operations clear_devices = True tf.reset_default_graph() #", "label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens) input_mask = [1] * len(input_ids) while len(input_ids)", "tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file = \"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read())", "sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data = [text] # 逐个分成 最大62长度的 text", "[] for i, word in enumerate(textlist): token = tokenizer.tokenize(word) # print(token) tokens.extend(token) if", "= {te_: [[start, index - 1]]} start = index # print(start) if re.search(\"^O\",", "= [k for k, v in label2id.items()] def process_one_example_p(tokenizer, text, max_seq_len=128): textlist =", "tensorflow as tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file = \"./vocab.txt\" tokenizer_ =", "labels[label] = {te_: [[start, index - 1]]} # else: # 
print(start, labels) start", "= [] for line in open(path): if not line.strip(): continue _ = json.loads(line.strip())", "- 2)] ntokens = [] segment_ids = [] label_ids = [] ntokens.append(\"[CLS]\") #", "open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if __name__ == \"__main__\": text_ = \"梅塔利斯在乌克兰联赛、杯赛及联盟杯中保持9场不败,状态相当出色;\" res_ = predict(text_) print(res_)", "p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data = [text] # 逐个分成 最大62长度的 text 进行", "feature in features], keep_prob: 1.0 } [probs] = sess.run([p], feed) result = []", "for feature in features], segment_ids: [feature[2] for feature in features], keep_prob: 1.0 }", "2)] # labels = labels[0:(max_seq_len - 2)] ntokens = [] segment_ids = []", "# We start a session and restore the graph weights sess_ = tf.Session()", "[] for index, prob in enumerate(probs): for v in prob[1:len(data[index]) + 1]: result.append(id2label[int(v)])", "for w, t in zip(\"\".join(data), result): if re.search(\"^[BS]\", t): if start is not", "for index, prob in enumerate(probs): for v in prob[1:len(data[index]) + 1]: result.append(id2label[int(v)]) print(result)", "input_checkpoint) except Exception as e: input_checkpoint = model_folder print(\"[INFO] Model folder\", model_folder, repr(e))", "+ 1]: result.append(id2label[int(v)]) print(result) labels = {} start = None index = 0", "1 if start is not None: # print(start) label = result[start][2:] if labels.get(label):", "= result[start][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_] = [[start,", "feed) result = [] for index, prob in enumerate(probs): for v in prob[1:len(data[index])", "sess = load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids =", "index, prob in enumerate(probs): for v in prob[1:len(data[index]) + 1]: result.append(id2label[int(v)]) print(result) labels", "1]]} # 
print(labels) return labels def submit(path): data = [] for line in", "< max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids) == max_seq_len assert len(input_mask)", "is not None: label = result[index - 1][2:] if labels.get(label): te_ = text[start:index]", "= text[start:index] # print(te_, labels) labels[label][te_] = [[start, index - 1]] else: te_", "labels) labels[label] = {te_: [[start, index - 1]]} start = index # print(start)", "sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data = [text] # 逐个分成 最大62长度的 text 进行 batch 预测", "model_path = \"./ner_bert_base/\" sess = load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\") #", "for k, v in label2id.items()] def process_one_example_p(tokenizer, text, max_seq_len=128): textlist = list(text) tokens", "in opts: # print(v.name) return sess_ model_path = \"./ner_bert_base/\" sess = load_model(model_path) input_ids", "keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data = [text] # 逐个分成", "def submit(path): data = [] for line in open(path): if not line.strip(): continue", "max_seq_len assert len(segment_ids) == max_seq_len feature = (input_ids, input_mask, segment_ids) return feature def", "# for v in opts: # print(v.name) return sess_ model_path = \"./ner_bert_base/\" sess", "labels) start = None index += 1 if start is not None: #", "= text[start:index] # print(te_, labels) labels[label] = {te_: [[start, index - 1]]} start", "\"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read()) id2label = [k for k, v", "res = predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\", 
\"w\").write(\"\\n\".join(data)) if __name__ == \"__main__\": text_", "= predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if __name__ == \"__main__\": text_ =", "list(text) tokens = [] # labels = [] for i, word in enumerate(textlist):", "[probs] = sess.run([p], feed) result = [] for index, prob in enumerate(probs): for", "sess_.graph.get_operations() # for v in opts: # print(v.name) return sess_ model_path = \"./ner_bert_base/\"", "model_folder print(\"[INFO] Model folder\", model_folder, repr(e)) # We clear devices to allow TensorFlow", "tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint) except Exception as e: input_checkpoint =", "text, max_seq_len=128): textlist = list(text) tokens = [] # labels = [] for", "checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint) except Exception as e:", "= [] # labels = [] for i, word in enumerate(textlist): token =", "= [] for index, prob in enumerate(probs): for v in prob[1:len(data[index]) + 1]:", "repr(e)) # We clear devices to allow TensorFlow to control on which device", "v in label2id.items()] def process_one_example_p(tokenizer, text, max_seq_len=128): textlist = list(text) tokens = []", "= tf.Session() saver.restore(sess_, input_checkpoint) # opts = sess_.graph.get_operations() # for v in opts:", "Exception as e: input_checkpoint = model_folder print(\"[INFO] Model folder\", model_folder, repr(e)) # We", "[] segment_ids = [] label_ids = [] ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志 segment_ids.append(0) for", "= sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\")", 
"# print(te_, labels) labels[label] = {te_: [[start, index - 1]]} # print(labels) return", "= {te_: [[start, index - 1]]} # print(labels) return labels def submit(path): data", "labels) labels[label] = {te_: [[start, index - 1]]} # print(labels) return labels def", "assert len(segment_ids) == max_seq_len feature = (input_ids, input_mask, segment_ids) return feature def load_model(model_folder):", "tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices) # We start a session and restore the graph", "predict(text): data = [text] # 逐个分成 最大62长度的 text 进行 batch 预测 features =", "in features], segment_ids: [feature[2] for feature in features], keep_prob: 1.0 } [probs] =", "input_mask, segment_ids) return feature def load_model(model_folder): # We retrieve our checkpoint fullpath try:", "} [probs] = sess.run([p], feed) result = [] for index, prob in enumerate(probs):", "= json.loads(open(\"./label2id.json\").read()) id2label = [k for k, v in label2id.items()] def process_one_example_p(tokenizer, text,", "sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p", "None: label = result[index - 1][2:] if labels.get(label): te_ = text[start:index] # print(te_,", "# is_training segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p =", "word in enumerate(textlist): token = tokenizer.tokenize(word) # print(token) tokens.extend(token) if len(tokens) >= max_seq_len", "input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids) == max_seq_len assert len(input_mask) == max_seq_len assert", "tokenizer.tokenize(word) # print(token) tokens.extend(token) if len(tokens) >= max_seq_len - 1: tokens = tokens[0:(max_seq_len", "= 
tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint) except Exception as e: input_checkpoint", ">= max_seq_len - 1: tokens = tokens[0:(max_seq_len - 2)] # labels = labels[0:(max_seq_len", "1]] else: te_ = text[start:index] # print(te_, labels) labels[label] = {te_: [[start, index", "input_mask: [feature[1] for feature in features], segment_ids: [feature[2] for feature in features], keep_prob:", "= model_folder print(\"[INFO] Model folder\", model_folder, repr(e)) # We clear devices to allow", "\"w\").write(\"\\n\".join(data)) if __name__ == \"__main__\": text_ = \"梅塔利斯在乌克兰联赛、杯赛及联盟杯中保持9场不败,状态相当出色;\" res_ = predict(text_) print(res_) submit(\"data/thuctc_valid.json\")", "+= 1 if start is not None: # print(start) label = result[start][2:] if", "[] ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志 segment_ids.append(0) for i, token in enumerate(tokens): ntokens.append(token) segment_ids.append(0)", "print(\"[INFO] Model folder\", model_folder, repr(e)) # We clear devices to allow TensorFlow to", "start is not None: # print(start) label = result[start][2:] if labels.get(label): te_ =", "features], input_mask: [feature[1] for feature in features], segment_ids: [feature[2] for feature in features],", "= sess_.graph.get_operations() # for v in opts: # print(v.name) return sess_ model_path =", "= list(text) tokens = [] # labels = [] for i, word in", "operations clear_devices = True tf.reset_default_graph() # We import the meta graph and retrieve", "input_checkpoint) # opts = sess_.graph.get_operations() # for v in opts: # print(v.name) return", "[] # labels = [] for i, word in enumerate(textlist): token = tokenizer.tokenize(word)", "textlist = list(text) tokens = [] # labels = [] for i, word", "[k for k, v in label2id.items()] def process_one_example_p(tokenizer, text, max_seq_len=128): textlist = list(text)", "= tokenizer.tokenize(word) # print(token) tokens.extend(token) if len(tokens) 
>= max_seq_len - 1: tokens =", "in prob[1:len(data[index]) + 1]: result.append(id2label[int(v)]) print(result) labels = {} start = None index", "start = index # print(start) if re.search(\"^O\", t): if start is not None:", "len(segment_ids) == max_seq_len feature = (input_ids, input_mask, segment_ids) return feature def load_model(model_folder): #", "sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape keep_prob", "is not None: # print(start) label = result[start][2:] if labels.get(label): te_ = text[start:index]", "labels def submit(path): data = [] for line in open(path): if not line.strip():", "最大62长度的 text 进行 batch 预测 features = [] for i in data: feature", "for feature in features], keep_prob: 1.0 } [probs] = sess.run([p], feed) result =", "# We retrieve our checkpoint fullpath try: checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path", "进行 batch 预测 features = [] for i in data: feature = process_one_example_p(tokenizer_,", "if start is not None: # print(start) label = result[start][2:] if labels.get(label): te_", "segment_ids: [feature[2] for feature in features], keep_prob: 1.0 } [probs] = sess.run([p], feed)", "labels) labels[label] = {te_: [[start, index - 1]]} # else: # print(start, labels)", "as tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file = \"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file)", "We retrieve our checkpoint fullpath try: checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO]", "= labels[0:(max_seq_len - 2)] ntokens = [] segment_ids = [] label_ids = []", "# label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens) 
input_mask = [1] * len(input_ids) while", "{te_: [[start, index - 1]]} # print(labels) return labels def submit(path): data =", "ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if __name__ == \"__main__\": text_ = \"梅塔利斯在乌克兰联赛、杯赛及联盟杯中保持9场不败,状态相当出色;\" res_ = predict(text_)", "len(input_ids) == max_seq_len assert len(input_mask) == max_seq_len assert len(segment_ids) == max_seq_len feature =", "+ '.meta', clear_devices=clear_devices) # We start a session and restore the graph weights", "in zip(\"\".join(data), result): if re.search(\"^[BS]\", t): if start is not None: label =", "{} start = None index = 0 for w, t in zip(\"\".join(data), result):", "[feature[1] for feature in features], segment_ids: [feature[2] for feature in features], keep_prob: 1.0", "import os import re import json import tensorflow as tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"]", "index = 0 for w, t in zip(\"\".join(data), result): if re.search(\"^[BS]\", t): if", "clear_devices = True tf.reset_default_graph() # We import the meta graph and retrieve a", "segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def", "weights sess_ = tf.Session() saver.restore(sess_, input_checkpoint) # opts = sess_.graph.get_operations() # for v", "We start a session and restore the graph weights sess_ = tf.Session() saver.restore(sess_,", "line in open(path): if not line.strip(): continue _ = json.loads(line.strip()) res = predict(_[\"text\"])", "labels[label][te_] = [[start, index - 1]] else: te_ = text[start:index] # print(te_, labels)", "devices to allow TensorFlow to control on which device it will load operations", "else: te_ = text[start:index] # print(te_, labels) labels[label] = {te_: [[start, index -", "not None: # print(start) label = result[start][2:] if labels.get(label): te_ = 
text[start:index] #", "index - 1]]} start = index # print(start) if re.search(\"^O\", t): if start", "= result[index - 1][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_]", "start a session and restore the graph weights sess_ = tf.Session() saver.restore(sess_, input_checkpoint)", "= checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint) except Exception as e: input_checkpoint = model_folder print(\"[INFO]", "in features], input_mask: [feature[1] for feature in features], segment_ids: [feature[2] for feature in", "print(te_, labels) labels[label] = {te_: [[start, index - 1]]} # print(labels) return labels", "session and restore the graph weights sess_ = tf.Session() saver.restore(sess_, input_checkpoint) # opts", "result = [] for index, prob in enumerate(probs): for v in prob[1:len(data[index]) +", "len(input_ids) < max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids) == max_seq_len assert", "fullpath try: checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint) except Exception", "return labels def submit(path): data = [] for line in open(path): if not", "in open(path): if not line.strip(): continue _ = json.loads(line.strip()) res = predict(_[\"text\"]) data.append(json.dumps({\"label\":", "= tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices) # We start a session and restore the", "features = [] for i in data: feature = process_one_example_p(tokenizer_, i, max_seq_len=64) features.append(feature)", "[[start, index - 1]]} # print(labels) return labels def submit(path): data = []", "which device it will load operations clear_devices = True tf.reset_default_graph() # We import", "meta graph and retrieve a Saver saver = tf.train.import_meta_graph(input_checkpoint + 
'.meta', clear_devices=clear_devices) #", "= text[start:index] # print(te_, labels) labels[label] = {te_: [[start, index - 1]]} #", "os import re import json import tensorflow as tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] =", "= 0 for w, t in zip(\"\".join(data), result): if re.search(\"^[BS]\", t): if start", "clear devices to allow TensorFlow to control on which device it will load", "# coding:utf8 \"\"\" @author: <NAME> @time: 2019-12-07 20:51 \"\"\" import os import re", "= [1] * len(input_ids) while len(input_ids) < max_seq_len: input_ids.append(0) input_mask.append(0) segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\")", "data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data)) if __name__ == \"__main__\": text_ = \"梅塔利斯在乌克兰联赛、杯赛及联盟杯中保持9场不败,状态相当出色;\" res_", "enumerate(probs): for v in prob[1:len(data[index]) + 1]: result.append(id2label[int(v)]) print(result) labels = {} start", "= [] ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志 segment_ids.append(0) for i, token in enumerate(tokens): ntokens.append(token)", "= {} start = None index = 0 for w, t in zip(\"\".join(data),", "checkpoint fullpath try: checkpoint = tf.train.get_checkpoint_state(model_folder) input_checkpoint = checkpoint.model_checkpoint_path print(\"[INFO] input_checkpoint:\", input_checkpoint) except", "1]]} # else: # print(start, labels) start = None index += 1 if", "0 for w, t in zip(\"\".join(data), result): if re.search(\"^[BS]\", t): if start is", "max_seq_len feature = (input_ids, input_mask, segment_ids) return feature def load_model(model_folder): # We retrieve", "cnn_block/Reshape keep_prob = sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data = [text] #", "- 1]]} # print(labels) return labels def submit(path): data = [] for line", "<NAME> @time: 2019-12-07 20:51 \"\"\" import os import re import json import 
tensorflow", "opts = sess_.graph.get_operations() # for v in opts: # print(v.name) return sess_ model_path", "= [] segment_ids = [] label_ids = [] ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志 segment_ids.append(0)", "# opts = sess_.graph.get_operations() # for v in opts: # print(v.name) return sess_", "# print(te_, labels) labels[label][te_] = [[start, index - 1]] else: te_ = text[start:index]", "data = [] for line in open(path): if not line.strip(): continue _ =", "not None: # print(start) label = result[index - 1][2:] if labels.get(label): te_ =", "result): if re.search(\"^[BS]\", t): if start is not None: label = result[index -", "tf.reset_default_graph() # We import the meta graph and retrieve a Saver saver =", "if labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_] = [[start, index -", "and restore the graph weights sess_ = tf.Session() saver.restore(sess_, input_checkpoint) # opts =", "t): if start is not None: label = result[index - 1][2:] if labels.get(label):", "id2label = [k for k, v in label2id.items()] def process_one_example_p(tokenizer, text, max_seq_len=128): textlist", "submit(path): data = [] for line in open(path): if not line.strip(): continue _", "line.strip(): continue _ = json.loads(line.strip()) res = predict(_[\"text\"]) data.append(json.dumps({\"label\": res}, ensure_ascii=False)) open(\"ner_predict.json\", \"w\").write(\"\\n\".join(data))", "os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file = \"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read()) id2label", "# print(v.name) return sess_ model_path = \"./ner_bert_base/\" sess = load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\")", "True tf.reset_default_graph() # We import the meta graph and retrieve a Saver saver", "{input_ids: [feature[0] for feature in features], input_mask: [feature[1] for feature in features], segment_ids:", "None: # 
print(start) label = result[start][2:] if labels.get(label): te_ = text[start:index] # print(te_,", "enumerate(textlist): token = tokenizer.tokenize(word) # print(token) tokens.extend(token) if len(tokens) >= max_seq_len - 1:", "ntokens = [] segment_ids = [] label_ids = [] ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志", "prob in enumerate(probs): for v in prob[1:len(data[index]) + 1]: result.append(id2label[int(v)]) print(result) labels =", "\"0\" vocab_file = \"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read()) id2label = [k", "max_seq_len=64) features.append(feature) feed = {input_ids: [feature[0] for feature in features], input_mask: [feature[1] for", "== max_seq_len assert len(input_mask) == max_seq_len assert len(segment_ids) == max_seq_len feature = (input_ids,", "start is not None: label = result[index - 1][2:] if labels.get(label): te_ =", "2)] ntokens = [] segment_ids = [] label_ids = [] ntokens.append(\"[CLS]\") # 句子开始设置CLS", "index - 1]]} # else: # print(start, labels) start = None index +=", "= tokens[0:(max_seq_len - 2)] # labels = labels[0:(max_seq_len - 2)] ntokens = []", "text[start:index] # print(te_, labels) labels[label][te_] = [[start, index - 1]] else: te_ =", "print(start) label = result[start][2:] if labels.get(label): te_ = text[start:index] # print(te_, labels) labels[label][te_]", "assert len(input_ids) == max_seq_len assert len(input_mask) == max_seq_len assert len(segment_ids) == max_seq_len feature", "load operations clear_devices = True tf.reset_default_graph() # We import the meta graph and", "# else: # print(start, labels) start = None index += 1 if start", "return sess_ model_path = \"./ner_bert_base/\" sess = load_model(model_path) input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask =", "# print(token) tokens.extend(token) if len(tokens) >= max_seq_len - 1: tokens = tokens[0:(max_seq_len -", "text[start:index] # print(te_, labels) 
labels[label] = {te_: [[start, index - 1]]} # else:", "ntokens.append(token) segment_ids.append(0) # label_ids.append(label2id[labels[i]]) ntokens.append(\"[SEP]\") segment_ids.append(0) input_ids = tokenizer.convert_tokens_to_ids(ntokens) input_mask = [1] *", "= \"0\" vocab_file = \"./vocab.txt\" tokenizer_ = tokenization.FullTokenizer(vocab_file=vocab_file) label2id = json.loads(open(\"./label2id.json\").read()) id2label =", "= sess.graph.get_tensor_by_name(\"keep_prob:0\") p = sess.graph.get_tensor_by_name(\"loss/ReverseSequence_1:0\") def predict(text): data = [text] # 逐个分成 最大62长度的", "= [[start, index - 1]] else: te_ = text[start:index] # print(te_, labels) labels[label]", "re import json import tensorflow as tf import tokenization os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"0\" vocab_file", "graph weights sess_ = tf.Session() saver.restore(sess_, input_checkpoint) # opts = sess_.graph.get_operations() # for", "will load operations clear_devices = True tf.reset_default_graph() # We import the meta graph", "input_ids = sess.graph.get_tensor_by_name(\"input_ids:0\") input_mask = sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu", "segment_ids.append(0) label_ids.append(0) ntokens.append(\"**NULL**\") assert len(input_ids) == max_seq_len assert len(input_mask) == max_seq_len assert len(segment_ids)", "segment_ids = [] label_ids = [] ntokens.append(\"[CLS]\") # 句子开始设置CLS 标志 segment_ids.append(0) for i,", "\"\"\" @author: <NAME> @time: 2019-12-07 20:51 \"\"\" import os import re import json", "Model folder\", model_folder, repr(e)) # We clear devices to allow TensorFlow to control", "- 1: tokens = tokens[0:(max_seq_len - 2)] # labels = labels[0:(max_seq_len - 2)]", "print(token) tokens.extend(token) if len(tokens) >= max_seq_len - 1: tokens = tokens[0:(max_seq_len - 2)]", "@time: 2019-12-07 20:51 \"\"\" import os import re import json import tensorflow as", "input_mask = 
sess.graph.get_tensor_by_name(\"input_mask:0\") # is_training segment_ids = sess.graph.get_tensor_by_name(\"segment_ids:0\") # fc/dense/Relu cnn_block/Reshape keep_prob =" ]
[ "with --files.\"\"\" # Parser error when both --files and --files-from are specified. self.CheckParserError(['--files',", "# Parser error if results destination dir is a file. filename = '/tmp/dest_dir_file'", "os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board', 'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes',", "\"\"\"Tests running an autotest from within the chroot.\"\"\" # Checks that mock version", "necessary beacuse the mock doesn't # capture the cros_sdk wrapper. self._tester._RunAutotest() # Check", "the isolate map. osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": { \"label\": \"%s\", \"type\": \"console_test_launcher\", } }\"\"\"", "localhost:9222\" ' accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify", "with --. self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must start with') def testParserErrorCWD(self): \"\"\"Verify we get", "parser errors with --files.\"\"\" # Parser error when both --files and --files-from are", "cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests a simple autotest call.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.Run() #", "'This module requires Python 3.6+' # pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for", "= '.ssh/testing_rsa' self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device =", "'-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join(", "arguments of the particular chrome test. 
\"\"\" self._tester.args = [test_exe] + test_args if", "running a remote command when src files are not specified. The remote command", "import cros_set_lsb_release from chromite.utils import outcap pytestmark = cros_test_lib.pytestmark_inside_only assert sys.version_info >= (3,", "'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify an xbuddy link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([", "license that can be # found in the LICENSE file. \"\"\"Unit tests for", "= True self._tester.Run() # Check that we use the custom port when talking", "['--test_that-args', '--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd, 'test_results') testing_rsa_dir =", "'/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test cases.\"\"\"", "we get parser errors with --files.\"\"\" # Parser error when both --files and", "given as a string. if isinstance(args, str): args = [args] # Putting outcap.OutputCapturer()", "an autotest from outside the chroot.\"\"\" # Checks that mock version has been", "self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info about the specified", "ran properly. Args: test_exe: The name of the chrome test. 
test_label: The label", "'-F /dev/null -i /dev/null', '172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock):", "pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for setup and creating a temp file", "\"\"\"Verify build/deploy and chrome test commands when a test arg is given.\"\"\" test_exe", "swallows SystemExit exception check. with self.assertRaises(SystemExit): with outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr())", "it runs on the host. self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest", "import pytest # pylint: disable=import-error from chromite.lib import constants from chromite.lib import cros_test", "&& !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running a single tast test", "error if no results source is given. self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir') # Parser", "if using chronos without a test command. self.CheckParserError('--as-chronos', 'as-chronos') # Parser error if", "testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a remote command when src files are not specified. The", "'-p', '9222', 'root@localhost', '--', 'stop ui']) # Ensure a user activity ping is", "cwd is not an absolute path. self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must be an absolute", "path') def testParserErrorFiles(self): \"\"\"Verify we get parser errors with --files.\"\"\" # Parser error", "are authorized with chronos. 
self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/']) # Ensure chronos has ownership", "'tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)'", "\"\"\"Verify build/deploy and chrome test commands using rsync to copy.\"\"\" test_exe = 'crypto_unittests'", "dir is a file. filename = '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename], 'existing", "'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args = ' '.join(test_args) if test_args", "self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ]) def testRunDeviceCmd(self): \"\"\"Verify a run", "Check that we use the custom port when talking to the VM. self.assertCommandContains(", "ssh keys are authorized with chronos. self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/']) # Ensure chronos", "for building/deploying Chrome.\"\"\" # Parser error if no build directory is specified. self.CheckParserError('--build',", "= self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info about the specified chrome", "for running a chrome test. Args: test_exe: The name of the chrome test.", "Ensure a user activity ping is sent to the device. 
self.assertCommandContains(['ssh', '-p', '9222',", "'root@localhost:%s' % filename, self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify that FileList returns the correct files.\"\"\"", "tests from the SimpleChrome SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir =", "self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self): \"\"\"Verify that results files/directories are copied from the", "used to build a CrOSTest. Returns: An instance of cros_test.CrOSTest. \"\"\" opts =", "\"\"\"Sets configurations necessary for running a chrome test. Args: test_exe: The name of", "test arg is given.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe test_args", "os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm',", "CrOSTest. Returns: An instance of cros_test.CrOSTest. \"\"\" opts = cros_test.ParseCommandLine(opts if opts else", "def testHostCmd(self): \"\"\"Verify running a host command.\"\"\" self._tester.host_cmd = True self._tester.build_dir = '/some/chromium/dir'", "self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test cases.\"\"\" def CheckParserError(self, args, error_msg): \"\"\"Checks", "test. self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run device command call when", "# Parser error if build directory is not an existing directory. 
self.CheckParserError(['--deploy', '--build-dir',", "as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify we get a parser error", "tester._device.use_sudo = False tester._device.board = 'amd64-generic' tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str =", "before running tests.\"\"\" self._tester.start_vm = True self._tester.Run() # Check if new VM got", "test command. self.CheckParserError('--as-chronos', 'as-chronos') # Parser error if there are args, but no", "= ['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ]) def testCatapultAsGuest(self): \"\"\"Verify", "= cros_test.ParseCommandLine(opts if opts else []) opts.enable_kvm = True # We check if", "runs on the host. self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test", "and chrome test commands using scp to copy.\"\"\" test_exe = 'crypto_unittests' test_label =", "when using certain commands.\"\"\" # Parser error if no test command is provided.", "# Putting outcap.OutputCapturer() before assertRaises(SystemExit) # swallows SystemExit exception check. with self.assertRaises(SystemExit): with", "a relative path') # Parser error when a file has a bad path.", "self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True self._tester.Run() # Check that we use the custom port", "is provided. self.CheckParserError('--remote-cmd', 'specify test command') # Parser error if using chronos without", "build_dir, test_exe]) # Ensure that the runtime dependencies are checked for. 
self.assertCommandContains(['gn', 'desc',", "'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self): \"\"\"Tests basic deploy chrome command.\"\"\" self._tester.deploy =", "test_exe, test_label, build_dir, test_args=None): \"\"\"Checks to see that chrome test commands ran properly.", "parser error is raised. Args: args: List of commandline arguments. error_msg: Error message", "import outcap pytestmark = cros_test_lib.pytestmark_inside_only assert sys.version_info >= (3, 6), 'This module requires", "we enter the chroot before running test_that. self.assertIn(('cros_sdk -- test_that --board amd64-generic --no-quickmerge'", "class for setup and creating a temp file path.\"\"\" def createTester(self, opts=None): \"\"\"Builds", "\"\"\" self._tester.args = [test_exe] + test_args if test_args else [test_exe] self._tester.chrome_test = True", "needed.\"\"\" self._tester.flash = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', }", "self.CheckParserError(['--files', 'file_list', '--files-from', 'file'], '--files and --files-from') # Parser error when --files-from does", "!disabled)' ] self._tester.Run() self.assertCommandContains([ 'tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" || \"dep:android\")", "['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd %s && /usr/local/autotest/bin/'", "is governed by a BSD-style license that can be # found in the", "self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd %s && /usr/local/autotest/bin/' 
'vm_sanity.py'", "--files-from') # Parser error when --files-from does not exist. self.CheckParserError(['--files-from', '/fake/file'], 'is not", "the autotest directory. self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a remote", "commands when a test arg is given.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:'", "in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' % filename, self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify that FileList returns", "self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run device command call when giving", "'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.test_that_args = ['--test_that-args',", "# Parser error if no results destination dir is given. self.CheckParserError(['--results-src', '/tmp/results'], 'with", "= '/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run() # Ensure command runs in the autotest", "True self._tester.Run() # Check if new VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check", "test. test_label: The label of the chrome test. test_args: A list of arguments", "test_exe, test_exe), '../../third_party/chromite'] # Creates the test_exe to be an executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]),", "self._tester.start_vm = True self._tester.Run() # Check if new VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm'])", "Parser error when both --files and --files-from are specified. 
self.CheckParserError(['--files', 'file_list', '--files-from', 'file'],", "'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test test cases.\"\"\" def SetUpChromeTest(self, test_exe, test_label,", "temp directory on the target. \"\"\" self._tester.remote_cmd = True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run()", "self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd, 'test_results')", "to a parent path. self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot start with ..') # Parser", "'-R', 'chronos:', '/usr/local/cros_test']) # Ensure command runs in the target directory. self.assertCommandContains('cd /usr/local/cros_test", "and ignores 'files'. file_list = ['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from))", "of the directory. self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test']) # Ensure command runs in the", "self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run',", "are being copied over to the device using rsync. self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir,", "additional args don't start with --. self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must start with') def", "A few files used by the chrome test. 
runtime_deps = [ './%s' %", "self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify build/deploy and chrome test commands when a", "additional arguments.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True self._tester.mount =", "the build directory from the parsed options. build_dir = cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir", "self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks that autotest is running. self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options', '-F", "= True self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos = True self._tester.args = ['crypto_unittests',", "os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests flash command is skipped when", "runs in the target directory. self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests ' '--test-launcher-print-test-stdio=always') # Ensure", "testChromeTestRsync(self): \"\"\"Verify build/deploy and chrome test commands using rsync to copy.\"\"\" test_exe =", "'12901.0.0', } self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False)", "self._tester.Run() # Ensure command runs in the autotest directory. 
self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py')", "expected=False) def testDeployChrome(self): \"\"\"Tests basic deploy chrome command.\"\"\" self._tester.deploy = True self._tester.build_dir =", "with ..') # Parser error when a non-existent file is passed to --files.", "using scp to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe,", "'amd64-generic' tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str = ('QEMU emulator version 2.6.0, Copyright", "see that chrome test commands ran properly. Args: test_exe: The name of the", "else '' # Ensure the chrome test is run. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost',", "basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() # Run vm_sanity. self.assertCommandContains([ 'ssh', '-p', '9222', 'root@localhost', '--',", "True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self): \"\"\"Verify that results files/directories are copied from", "being built. self.assertCommandContains(['autoninja', '-C', build_dir, test_exe]) # Ensure that the runtime dependencies are", "if test_args else [test_exe] self._tester.chrome_test = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map =", "tests with an expression.\"\"\" self._tester.tast = [ '((\"dep:chrome\" || \"dep:android\") && !flaky &&", "Ensure files are being copied over to the device using scp. self.assertCommandContains(['scp', '%s/'", "test.\"\"\" return os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility methods\"\"\" def testStartVM(self): \"\"\"Verify", "that autotest is running. 
self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', 'localhost:9222',", "This is necessary beacuse the mock doesn't # capture the cros_sdk wrapper. self._tester._RunAutotest()", "runtime files. self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe, test_label, build_dir, test_args=None):", "'/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run device command call when giving a cwd.\"\"\"", "'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify an xbuddy link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR,", "os.path.join(cwd, 'test_results') testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board', 'amd64-generic', '--results_dir', test_results_dir,", "osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos = True self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure target", "Copyright (c) ' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester def setUp(self): \"\"\"Common set", "# Checks that autotest is running. 
self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options', '-F /dev/null -i", "\"\"\"Tests deploy chrome command with additional arguments.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release')", "&& su chronos -c -- ' '\"out_amd64-generic/Release/%s %s\"' % (test_exe, args)]) def testChromeTestRsync(self):", "--host-cmd or --chrome-test') # Parser error when additional args don't start with --.", "('QEMU emulator version 2.6.0, Copyright (c) ' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester", "osutils from chromite.lib import partial_mock from chromite.scripts import cros_set_lsb_release from chromite.utils import outcap", "Parser error if no results destination dir is given. self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src')", "chrome is being built. self.assertCommandContains(['autoninja', '-C', build_dir, test_exe]) # Ensure that the runtime", "'--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', '172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot',", "\"\"\"Tests basic deploy chrome command.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome',", "that chrome test commands ran properly. Args: test_exe: The name of the chrome", "method for all tests.\"\"\" self._tester = self.createTester() def TempFilePath(self, file_path): \"\"\"Creates a temporary", "functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() # Run vm_sanity. self.assertCommandContains([ 'ssh', '-p', '9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py'", "got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check if new VM is responsive. 
self.assertCommandContains( ['ssh',", "'--', 'true']) def testFlash(self): \"\"\"Tests flash command.\"\"\" # Verify that specifying the board", "executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) # Mocks", "command.\"\"\" # Verify that specifying the board gets the latest canary. self._tester.flash =", "parser test cases.\"\"\" def CheckParserError(self, args, error_msg): \"\"\"Checks that parser error is raised.", "of the test. self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run device command", "os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'),", "SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base')", "mode=0o700) for dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) # Mocks the output by", "args, but no command. self.CheckParserError('--some_test some_command', '--remote-cmd or --host-cmd or --chrome-test') # Parser", "self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin')", "files are being copied over to the device using rsync. 
self.assertCommandContains(['rsync', '%s/' %", "test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', '172.16.17.32', 'accessibility_Sanity'],", "the correct files.\"\"\" # Ensure FileList returns files when files_from is None. files", "file path.\"\"\" def createTester(self, opts=None): \"\"\"Builds a CrOSTest suitable for testing. Args: opts:", "\"\"\"Tests running an autotest from outside the chroot.\"\"\" # Checks that mock version", "error when a file has a bad path. self.CheckParserError(['--files', '../some_file'], 'cannot start with", "\"\"\"Checks that parser error is raised. Args: args: List of commandline arguments. error_msg:", "# Parser error if there are args, but no command. self.CheckParserError('--some_test some_command', '--remote-cmd", "beacuse the mock doesn't # capture the cros_sdk wrapper. self._tester._RunAutotest() # Check that", "'true']) def testFlash(self): \"\"\"Tests flash command.\"\"\" # Verify that specifying the board gets", "test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe, test_label, build_dir, test_args=None): \"\"\"Checks to see that chrome", "= ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify", "miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self, isrunning_mock): \"\"\"Tests basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called()", "True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd", "we get a parser error for 
--chrome-test when no args are given.\"\"\" self.CheckParserError('--chrome-test',", "files when files_from is None. files = ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None)) #", "test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' # test_label looks like //crypto:crypto_unittests. # label_root extracts 'crypto'", "'run', 'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure that --host-cmd does not", "-c -- ' '\"out_amd64-generic/Release/%s %s\"' % (test_exe, args)]) def testChromeTestRsync(self): \"\"\"Verify build/deploy and", "chroot.\"\"\" # Checks that mock version has been called. # TODO(crbug/1065172): Invalid assertion", "mode.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.guest = True self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest',", "} self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify an xbuddy link.", "@pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests a simple autotest", "self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd %s && /usr/local/autotest/bin/' 'vm_sanity.py' %", "__future__ import print_function import os import sys import mock import pytest # pylint:", "self._tester.public_image = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run()", "self._tester.build_dir = 
self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True self._tester.mount = True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def", "Returns: An instance of cros_test.CrOSTest. \"\"\" opts = cros_test.ParseCommandLine(opts if opts else [])", "'tast'), 'run', '-build=false', '-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'),", "= self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str = ('QEMU emulator version 2.6.0, Copyright (c) '", "the directory. self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test']) # Ensure command runs in the target", "self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests flash command is", "'tast', 'run'], 'must start with') def testParserErrorCWD(self): \"\"\"Verify we get parser errors when", "host. self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test cases.\"\"\" def testBasicAutotest(self):", "that the build directory is set when not specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') #", "previously been mocked. 
# check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key =", "\" --ssh_options '-F /dev/null -i /dev/null' localhost:9222\" ' accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests", "'test_results') testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board', 'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key',", "['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify running", "'-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify running tast tests from the", "'' # Ensure the chrome test is run. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--',", "{ cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost',", "self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' % filename,", "'/usr/local/cros_test']) # Ensure command runs in the target directory. self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests", "host command.\"\"\" self._tester.host_cmd = True self._tester.build_dir = '/some/chromium/dir' self._tester.args = ['tast', 'run', 'localhost:9222',", "got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks that autotest is running. 
self.assertCommandContains([ 'test_that', '--no-quickmerge',", "'-remotedatadir=%s' % os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ])", "error if results destination dir is a file. filename = '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src',", "self.assertCommandContains(['scp', 'root@localhost:%s' % filename, self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify that FileList returns the correct", "a host command.\"\"\" self._tester.host_cmd = True self._tester.build_dir = '/some/chromium/dir' self._tester.args = ['tast', 'run',", "]) def testCatapultAsGuest(self): \"\"\"Verify that we use the correct browser in guest mode.\"\"\"", "directories.\"\"\" # Parser error if --results-src is not absolute. self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') #", "test_exe]) # Ensure that the runtime dependencies are checked for. self.assertCommandContains(['gn', 'desc', build_dir,", "str): args = [args] # Putting outcap.OutputCapturer() before assertRaises(SystemExit) # swallows SystemExit exception", "chronos -c -- ' '\"out_amd64-generic/Release/%s %s\"' % (test_exe, args)]) def testChromeTestRsync(self): \"\"\"Verify build/deploy", "test_args=None): \"\"\"Sets configurations necessary for running a chrome test. Args: test_exe: The name", "] self._tester.Run() self.assertCommandContains([ 'tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" || \"dep:android\") &&", "test commands ran properly. Args: test_exe: The name of the chrome test. 
test_label:", "'\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def", "['accessibility_Sanity'] self._tester.results_dir = 'test_results' self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False", "self._tester.autotest = ['accessibility_Sanity'] # Capture the run command. This is necessary beacuse the", "the VM. self.assertCommandContains( ['ssh', '-p', '12345', 'root@localhost', '--', 'true']) def testFlash(self): \"\"\"Tests flash", "args: List of commandline arguments. error_msg: Error message to check for. \"\"\" #", "exception is not raised if it fails. self.assertCommandCalled( ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'], check=False,", "Checks that mock version has been called. # TODO(crbug/1065172): Invalid assertion that had", "cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd, 'test_results') testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa')", "2019 The Chromium OS Authors. All rights reserved. # Use of this source", "# Mocks the output by providing necessary runtime files. 
self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]),", "= '172.16.17.32' self._tester.results_dir = '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run', '-build=false', '-waituntilready', '-timeout=100',", "'--', 'cd /usr/local/chrome_test && su chronos -c -- ' '\"out_amd64-generic/Release/%s %s\"' % (test_exe,", "parser errors for results src/dest directories.\"\"\" # Parser error if --results-src is not", "build/deploy and chrome test commands using scp to copy.\"\"\" test_exe = 'crypto_unittests' test_label", "FileList returns the correct files.\"\"\" # Ensure FileList returns files when files_from is", "new VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check if new VM is responsive.", "'.ssh/testing_rsa' self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32'", "results files/directories are copied from the DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir", "['ssh', '-p', '9222', 'root@localhost', '--', 'true']) def testStartVMCustomPort(self): \"\"\"Verify that a custom SSH", "# Verify that specifying the board gets the latest canary. self._tester.flash = True", "'--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure target directory is created on the DUT. self.assertCommandContains(['mkdir', '-p',", "ignores 'files'. 
file_list = ['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from)) class", "self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos = True self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run()", "given. self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir') # Parser error if results destination dir is", "results destination dir is given. self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src') # Parser error if", "not invoke ssh since it runs on the host. self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog')", "'-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify running a set of tast", "command.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ]) def", "'--files-from', 'file'], '--files and --files-from') # Parser error when --files-from does not exist.", "tast test with various arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout = 100 self._tester._device.log_level =", "'root@localhost', '--', 'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args = ' '.join(test_args)", "return_value=True): tester = cros_test.CrOSTest(opts) tester._device.use_sudo = False tester._device.board = 'amd64-generic' tester._device.image_path = self.TempFilePath(", "the DUT. 
self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) # Ensure test ssh keys are authorized with", "'tmp/cwd'], 'cwd must be an absolute path') def testParserErrorFiles(self): \"\"\"Verify we get parser", "[os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self): \"\"\"Tests basic deploy chrome command.\"\"\"", "vm_sanity. self.assertCommandContains([ 'ssh', '-p', '9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ]) def testCatapult(self): \"\"\"Verify catapult", "= 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.test_that_args =", "self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests flash command is skipped", "# Ensure the chrome test is run. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'cd", "cros_set_lsb_release from chromite.utils import outcap pytestmark = cros_test_lib.pytestmark_inside_only assert sys.version_info >= (3, 6),", "'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify running a set of", "-i /dev/null' localhost:9222\" ' accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test cases.\"\"\" def", "an expression.\"\"\" self._tester.tast = [ '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ]", "device command call when giving a cwd.\"\"\" self._tester.remote_cmd = True self._tester.cwd = '/usr/local/autotest'", "test cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests a simple autotest call.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.Run()", "options. 
build_dir = cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify parser", "from the SimpleChrome SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache(", "target directory. self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests ' '--test-launcher-print-test-stdio=always') # Ensure target directory is", "\"\"\"Verify we get parser errors when using certain commands.\"\"\" # Parser error if", "'ssh', '-p', '9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ]) def testCatapult(self): \"\"\"Verify catapult test command.\"\"\"", "a BSD-style license that can be # found in the LICENSE file. \"\"\"Unit", "args as a list if it is given as a string. if isinstance(args,", "..') # Parser error when a non-existent file is passed to --files. self.CheckParserError(['--files',", "provided. self.CheckParserError('--remote-cmd', 'specify test command') # Parser error if using chronos without a", "Chrome test arguments. \"\"\" # Ensure chrome is being built. self.assertCommandContains(['autoninja', '-C', build_dir,", "no results destination dir is given. self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src') # Parser error", "when both --files and --files-from are specified. 
self.CheckParserError(['--files', 'file_list', '--files-from', 'file'], '--files and", "'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests a", "'/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' %", "mock version has been called. # TODO(crbug/1065172): Invalid assertion that had previously been", "= self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True self._tester.mount = True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self):", "self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename in", "Ensure FileList returns files when files_from is None. files = ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files,", "error when additional args don't start with --. self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must start", "is given. self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src') # Parser error if no results source", "= test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' # test_label looks like //crypto:crypto_unittests. 
# label_root extracts", "skipped when not needed.\"\"\" self._tester.flash = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = {", "/dev/null -i /dev/null', 'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests an autotest call with attributes.\"\"\"", "cros_test.FileList(files, files_from)) # Ensure FileList uses 'files_from' and ignores 'files'. file_list = ['/tmp/file1',", "is being built. self.assertCommandContains(['autoninja', '-C', build_dir, test_exe]) # Ensure that the runtime dependencies", "using rsync to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe,", "is set when not specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the build directory", "cwd refers to a parent path. self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot start with ..')", "Parser error if no build directory is specified. self.CheckParserError('--build', '--build-dir') # Parser error", "self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run() # Ensure command runs in the autotest directory. self.assertCommandContains('cd", "An instance of cros_test.CrOSTest. \"\"\" opts = cros_test.ParseCommandLine(opts if opts else []) opts.enable_kvm", "file. 
filename = '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename], 'existing file') def testParserErrorCommands(self):", "from chromite.lib import partial_mock from chromite.scripts import cros_set_lsb_release from chromite.utils import outcap pytestmark", "self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir, '--process-timeout', '180', '--device', self._tester._device.device + ':9222', '--board', 'amd64-generic',", "True self._tester.build_dir = '/some/chromium/dir' self._tester.args = ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() # Ensure", "osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' % filename, self._tester.results_dest_dir]) def testFileList(self):", "authorized with chronos. self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/']) # Ensure chronos has ownership of", "self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must start with') def testParserErrorCWD(self): \"\"\"Verify we get parser errors", "\"\"\" self._tester.remote_cmd = True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir',", "must be an absolute path') def testParserErrorFiles(self): \"\"\"Verify we get parser errors with", "if new VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check if new VM is", "for the build dir, and ensure an # exception is not raised if", "VM. 
self.assertCommandContains( ['ssh', '-p', '12345', 'root@localhost', '--', 'true']) def testFlash(self): \"\"\"Tests flash command.\"\"\"", "\"\"\"Tests autotest test cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests a simple autotest call.\"\"\" self._tester.autotest =", "Checks that autotest is running. self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null',", "directory is created on the DUT. self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) # Ensure test ssh", "tast tests with an expression.\"\"\" self._tester.tast = [ '((\"dep:chrome\" || \"dep:android\") && !flaky", "self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self): \"\"\"Tests basic deploy chrome", "GPU if needed. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'stop ui']) # Ensure a", "True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR,", "to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run()", "self.createTester() def TempFilePath(self, file_path): \"\"\"Creates a temporary file path lasting for the duration", "= '/usr/local/chrome_test' # test_label looks like //crypto:crypto_unittests. # label_root extracts 'crypto' from the", "rsync_mock): \"\"\"Verify build/deploy and chrome test commands using scp to copy.\"\"\" test_exe =", "else []) opts.enable_kvm = True # We check if /dev/kvm is writeable to", "class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify running a single tast", "no command. 
self.CheckParserError('--some_test some_command', '--remote-cmd or --host-cmd or --chrome-test') # Parser error when", "set of tast tests with an expression.\"\"\" self._tester.tast = [ '((\"dep:chrome\" || \"dep:android\")", "/usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'], expected=False) def testHostCmd(self): \"\"\"Verify running a", "with self.assertRaises(SystemExit): with outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify we", "when specifying the cwd.\"\"\" # Parser error if the cwd refers to a", "'is not a file') # Parser error when a file in --files has", "-*- # Copyright 2019 The Chromium OS Authors. All rights reserved. # Use", "simple autotest call.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.Run() # Check VM got launched. self.assertCommandContains([self._tester._device.qemu_path,", "directory from the parsed options. build_dir = cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir))", "All rights reserved. # Use of this source code is governed by a", "isolate map. osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": { \"label\": \"%s\", \"type\": \"console_test_launcher\", } }\"\"\" %", "# Ensure FileList returns files when files_from is None. files = ['/tmp/filename1', '/tmp/filename2']", "the chrome test. build_dir: The directory where chrome is built. 
test_args: Chrome test", "command with additional arguments.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True", "self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'stop ui']) # Ensure a user activity ping", "['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify parser errors for building/deploying Chrome.\"\"\"", "an xbuddy link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0'])", "'-rf'], expected=False) def testHostCmd(self): \"\"\"Verify running a host command.\"\"\" self._tester.host_cmd = True self._tester.build_dir", "has been called. # TODO(crbug/1065172): Invalid assertion that had previously been mocked. #", "list of arguments of the particular chrome test. \"\"\" self._tester.args = [test_exe] +", "reserved. # Use of this source code is governed by a BSD-style license", "with various arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout = 100 self._tester._device.log_level = 'debug' self._tester._device.should_start_vm", "test_exe), '../../third_party/chromite'] # Creates the test_exe to be an executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700)", "running a chrome test. Args: test_exe: The name of the chrome test. test_label:", "get a parser error for --chrome-test when no args are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test')", "--files-from does not exist. 
self.CheckParserError(['--files-from', '/fake/file'], 'is not a file') # Parser error", "self._tester.results_dir = 'test_results' self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port", "['test_that', '--board', 'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F /dev/null", "with') def testParserErrorCWD(self): \"\"\"Verify we get parser errors when specifying the cwd.\"\"\" #", "the test_exe to be an executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for dep in runtime_deps[1:]:", "files_from does not exist. files_from = self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from)) # Ensure FileList", "results-dest-dir') # Parser error if results destination dir is a file. filename =", "files are being copied over to the device using scp. self.assertCommandContains(['scp', '%s/' %", "CrOSTest.\"\"\" from __future__ import print_function import os import sys import mock import pytest", "stopped so the test can grab the GPU if needed. self.assertCommandContains(['ssh', '-p', '9222',", "[self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos = True self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure", "runtime dependencies are checked for. self.assertCommandContains(['gn', 'desc', build_dir, test_label, 'runtime_deps']) # Ensure UI", "Cmd-line args to cros_test used to build a CrOSTest. Returns: An instance of", "&& !flaky && !disabled)' ] self._tester.Run() self.assertCommandContains([ 'tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222',", "over to the device using rsync. 
self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False)", "don't start with --. self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must start with') def testParserErrorCWD(self): \"\"\"Verify", "is a file. filename = '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename], 'existing file')", "filename = '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename], 'existing file') def testParserErrorCommands(self): \"\"\"Verify", "be an executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True)", "self._tester.Run() # Check VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks that autotest is", "'/usr/local/autotest/bin/vm_sanity.py' ]) def testCatapult(self): \"\"\"Verify catapult test command.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.Run() self.assertCommandContains([", "\"\"\"Verify a run device command call when giving a cwd.\"\"\" self._tester.remote_cmd = True", "-i /dev/null', '172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running", "ssh since it runs on the host. 
self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase):", "testCatapultAsGuest(self): \"\"\"Verify that we use the correct browser in guest mode.\"\"\" self._tester.catapult_tests =", "\"%s\": { \"label\": \"%s\", \"type\": \"console_test_launcher\", } }\"\"\" % (test_exe, test_label), makedirs=True) self._tester.build", "as a list if it is given as a string. if isinstance(args, str):", "# Parser error when --files-from does not exist. self.CheckParserError(['--files-from', '/fake/file'], 'is not a", "args = ' '.join(test_args) if test_args else '' # Ensure the chrome test", "is supported for tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True self._tester.Run() # Check", "osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename], 'existing file') def testParserErrorCommands(self): \"\"\"Verify we get parser", "source code is governed by a BSD-style license that can be # found", "= os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board', 'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug',", "an executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) #", "over to the device using scp. self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def", "True # We check if /dev/kvm is writeable to use sudo. 
with mock.patch.object(os,", "'9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd %s && /usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd], expected=False)", "\"\"\"Verify that results files/directories are copied from the DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt',", "in the LICENSE file. \"\"\"Unit tests for CrOSTest.\"\"\" from __future__ import print_function import", "error when a non-existent file is passed to --files. self.CheckParserError(['--files', 'fake/file'], 'does not", "'/some/chromium/dir'}) # Ensure that --host-cmd does not invoke ssh since it runs on", "'-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot')", "with attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = 'test_results' self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level =", "def testExpressionBaseTastTest(self): \"\"\"Verify running a set of tast tests with an expression.\"\"\" self._tester.tast", "for. \"\"\" # Recreate args as a list if it is given as", "= True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir, '--process-timeout', '180', '--device',", "\"\"\"Tests basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() # Run vm_sanity. self.assertCommandContains([ 'ssh', '-p', '9222', 'root@localhost',", "# Ensure chrome is being built. 
self.assertCommandContains(['autoninja', '-C', build_dir, test_exe]) # Ensure that", "chromite.utils import outcap pytestmark = cros_test_lib.pytestmark_inside_only assert sys.version_info >= (3, 6), 'This module", "when giving a cwd.\"\"\" self._tester.remote_cmd = True self._tester.cwd = '/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py']", "self.CheckParserError(['--files-from', '/fake/file'], 'is not a file') # Parser error when a file in", "self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True self._tester.mount = True self._tester.Run()", "expected=False) self.assertCommandContains(['rm', '-rf'], expected=False) def testHostCmd(self): \"\"\"Verify running a host command.\"\"\" self._tester.host_cmd =", "file') def testParserErrorCommands(self): \"\"\"Verify we get parser errors when using certain commands.\"\"\" #", "VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check if new VM is responsive. self.assertCommandContains(", "the host. 
self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test cases.\"\"\" def", "\"\"\"Verify parser errors for building/deploying Chrome.\"\"\" # Parser error if no build directory", "'run', '-build=false', '-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify running tast", "'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests flash command", "catapult test command.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults'", "command is run with an env var for the build dir, and ensure", "'root@localhost', '--', 'true']) def testStartVMCustomPort(self): \"\"\"Verify that a custom SSH port is supported", "Args: test_exe: The name of the chrome test. test_label: The label of the", "testParserErrorCWD(self): \"\"\"Verify we get parser errors when specifying the cwd.\"\"\" # Parser error", "the target. \"\"\" self._tester.remote_cmd = True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222',", "get parser errors when using certain commands.\"\"\" # Parser error if no test", "self.CheckParserError('--build', '--build-dir') # Parser error if build directory is not an existing directory.", "gets the latest canary. 
self._tester.flash = True self._tester.public_image = True self._tester._device.board = 'octopus'", "= ['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key',", "on the DUT. self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) # Ensure test ssh keys are authorized", "string. if isinstance(args, str): args = [args] # Putting outcap.OutputCapturer() before assertRaises(SystemExit) #", "instance. label_root = test_label.split(':')[0].lstrip('/') # A few files used by the chrome test.", "(label_root, test_exe, test_exe), '../../third_party/chromite'] # Creates the test_exe to be an executable. osutils.Touch(os.path.join(self._tester.build_dir,", "['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() # Ensure command is run with an env", "not specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the build directory from the parsed", "import os import sys import mock import pytest # pylint: disable=import-error from chromite.lib", "chronos has ownership of the directory. self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test']) # Ensure command", "'/some/chromium/dir' self._tester.args = ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() # Ensure command is run", "Parser error when additional args don't start with --. self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must", "build_dir = cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify parser errors", "board gets the latest canary. 
self._tester.flash = True self._tester.public_image = True self._tester._device.board =", "test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test cases.\"\"\" def CheckParserError(self, args, error_msg): \"\"\"Checks that", "Parser error when a non-existent file is passed to --files. self.CheckParserError(['--files', 'fake/file'], 'does", "= 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.results_dir =", "we get parser errors when specifying the cwd.\"\"\" # Parser error if the", "# Ensure chronos has ownership of the directory. self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test']) #", "self._tester.Run() # Ensure command is run with an env var for the build", "self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test cases.\"\"\" def CheckParserError(self,", "self.assertIn(('cros_sdk -- test_that --board amd64-generic --no-quickmerge' \" --ssh_options '-F /dev/null -i /dev/null' localhost:9222\"", "def testParserErrorFiles(self): \"\"\"Verify we get parser errors with --files.\"\"\" # Parser error when", "'12345', 'root@localhost', '--', 'true']) def testFlash(self): \"\"\"Tests flash command.\"\"\" # Verify that specifying", "change the working directory or create a temp directory on the target. \"\"\"", "cases.\"\"\" def CheckParserError(self, args, error_msg): \"\"\"Checks that parser error is raised. 
Args: args:", "\"\"\"Verify running tast tests from the SimpleChrome SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key =", "]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running a single tast test with various", "'--browser=system', 'testAddResults' ]) def testCatapultAsGuest(self): \"\"\"Verify that we use the correct browser in", "tester._device.board = 'amd64-generic' tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str = ('QEMU emulator version", "a single tast test.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm',", "self.assertCommandContains(['tast', '-verbose', 'run', '-build=false', '-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify", "'--ssh_options', '-F /dev/null -i /dev/null', '172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self,", "previously been mocked. 
# check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] # Capture the run command.", "'[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy and chrome test commands using", "'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false', '-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'),", "version 2.6.0, Copyright (c) ' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester def setUp(self):", "]) def testRunDeviceCmd(self): \"\"\"Verify a run device cmd call.\"\"\" self._tester.remote_cmd = True self._tester.files", "# Check if new VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check if new", "self.CheckParserError('--remote-cmd', 'specify test command') # Parser error if using chronos without a test", "self.CheckParserError('--as-chronos', 'as-chronos') # Parser error if there are args, but no command. self.CheckParserError('--some_test", "'must start with') def testParserErrorCWD(self): \"\"\"Verify we get parser errors when specifying the", "Check that we enter the chroot before running test_that. self.assertIn(('cros_sdk -- test_that --board", "the chrome test. runtime_deps = [ './%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe,", "a test command. 
self.CheckParserError('--as-chronos', 'as-chronos') # Parser error if there are args, but", "by a BSD-style license that can be # found in the LICENSE file.", "use the correct browser in guest mode.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.guest = True", "the correct browser in guest mode.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.guest = True self._tester.Run()", "running test_that. self.assertIn(('cros_sdk -- test_that --board amd64-generic --no-quickmerge' \" --ssh_options '-F /dev/null -i", "utf-8 -*- # Copyright 2019 The Chromium OS Authors. All rights reserved. #", "given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self): \"\"\"Verify that the build directory is set when", "'-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd %s && /usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd],", "if results destination dir is a file. 
filename = '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results',", "we get parser errors when using certain commands.\"\"\" # Parser error if no", "'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() # Ensure command is run with an env var for", "partial_mock from chromite.scripts import cros_set_lsb_release from chromite.utils import outcap pytestmark = cros_test_lib.pytestmark_inside_only assert", "output=version_str) return tester def setUp(self): \"\"\"Common set up method for all tests.\"\"\" self._tester", "'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() # Ensure command is run with an env var", "'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ]) def testRunDeviceCmd(self): \"\"\"Verify a run device cmd", "# Ensure UI is stopped so the test can grab the GPU if", "start with ..') # Parser error when a non-existent file is passed to", "rsync to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label)", "cannot start with ..') # Parser error if the cwd is not an", "We check if /dev/kvm is writeable to use sudo. with mock.patch.object(os, 'access', return_value=True):", "responsive. self.assertCommandContains( ['ssh', '-p', '9222', 'root@localhost', '--', 'true']) def testStartVMCustomPort(self): \"\"\"Verify that a", "# Parser error if no build directory is specified. self.CheckParserError('--build', '--build-dir') # Parser", "pylint: disable=import-error from chromite.lib import constants from chromite.lib import cros_test from chromite.lib import", "True self._tester.deploy = True self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' # test_label looks", "it is given as a string. if isinstance(args, str): args = [args] #", "--files and --files-from are specified. 
self.CheckParserError(['--files', 'file_list', '--files-from', 'file'], '--files and --files-from') #", "a temporary file path lasting for the duration of a test.\"\"\" return os.path.join(self.tempdir,", "'9222', 'root@localhost', '--', 'true']) def testStartVMCustomPort(self): \"\"\"Verify that a custom SSH port is", "that a new VM is started before running tests.\"\"\" self._tester.start_vm = True self._tester.Run()", "copied over to the device using scp. self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called()", "from within the chroot.\"\"\" # Checks that mock version has been called. #", "to build a CrOSTest. Returns: An instance of cros_test.CrOSTest. \"\"\" opts = cros_test.ParseCommandLine(opts", "'-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test test", "a single tast test with various arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout = 100", "when a file has a bad path. self.CheckParserError(['--files', '../some_file'], 'cannot start with ..')", "build a CrOSTest. Returns: An instance of cros_test.CrOSTest. \"\"\" opts = cros_test.ParseCommandLine(opts if", "'-enable-kvm']) # Check if new VM is responsive. self.assertCommandContains( ['ssh', '-p', '9222', 'root@localhost',", "results destination dir is a file. filename = '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir',", "accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify running a", "doesn't # capture the cros_sdk wrapper. self._tester._RunAutotest() # Check that we enter the", "test_args: Chrome test arguments. \"\"\" # Ensure chrome is being built. 
self.assertCommandContains(['autoninja', '-C',", "--chrome-test') # Parser error when additional args don't start with --. self.CheckParserError(['--host-cmd', 'tast',", "= False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.results_dir = '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called()", "self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running", "runtime_deps[0]), mode=0o700) for dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) # Mocks the output", "and chrome test commands when a test arg is given.\"\"\" test_exe = 'crypto_unittests'", "= ('QEMU emulator version 2.6.0, Copyright (c) ' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return", "to use sudo. 
with mock.patch.object(os, 'access', return_value=True): tester = cros_test.CrOSTest(opts) tester._device.use_sudo = False", "constants from chromite.lib import cros_test from chromite.lib import cros_test_lib from chromite.lib import osutils", "\"\"\"Tests miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self, isrunning_mock): \"\"\"Tests basic functionality.\"\"\" self._tester.Run()", "self._tester.args = ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() # Ensure command is run with", "a run device command call when giving a cwd.\"\"\" self._tester.remote_cmd = True self._tester.cwd", "'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests an autotest call with attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity']", "'.join(test_args) if test_args else '' # Ensure the chrome test is run. self.assertCommandContains(['ssh',", "Capture the run command. This is necessary beacuse the mock doesn't # capture", "def testParserErrorResultsSrc(self): \"\"\"Verify parser errors for results src/dest directories.\"\"\" # Parser error if", "= False tester._device.board = 'amd64-generic' tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str = ('QEMU", "self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run()", "chromite.scripts import cros_set_lsb_release from chromite.utils import outcap pytestmark = cros_test_lib.pytestmark_inside_only assert sys.version_info >=", "class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility methods\"\"\" def testStartVM(self): \"\"\"Verify that a new VM", "'../some_file'], 'cannot start with ..') # Parser error when a non-existent file is", "'--chrome-test') def 
testParserSetsBuildDir(self): \"\"\"Verify that the build directory is set when not specified.\"\"\"", "+ test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) # Ensure files are being", "that can be # found in the LICENSE file. \"\"\"Unit tests for CrOSTest.\"\"\"", "Parser error if results destination dir is a file. filename = '/tmp/dest_dir_file' osutils.Touch(filename)", "map. osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": { \"label\": \"%s\", \"type\": \"console_test_launcher\", } }\"\"\" % (test_exe,", "directory is set when not specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the build", "Mocks the output by providing necessary runtime files. self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps))", "parser errors when specifying the cwd.\"\"\" # Parser error if the cwd refers", "\"\"\"Tests miscellaneous utility methods\"\"\" def testStartVM(self): \"\"\"Verify that a new VM is started", "'/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test test cases.\"\"\" def", "self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure target directory is created on the", "'--process-timeout', '180', '--device', self._tester._device.device + ':9222', '--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests", ">= (3, 6), 'This module requires Python 3.6+' # pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase):", "su chronos -c -- ' '\"out_amd64-generic/Release/%s %s\"' % (test_exe, args)]) def testChromeTestRsync(self): \"\"\"Verify", "created on the DUT. 
self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) # Ensure test ssh keys are", "-*- coding: utf-8 -*- # Copyright 2019 The Chromium OS Authors. All rights", "or --host-cmd or --chrome-test') # Parser error when additional args don't start with", "cwd.\"\"\" # Parser error if the cwd refers to a parent path. self.CheckParserError(['--cwd',", "' '--test-launcher-print-test-stdio=always') # Ensure target directory is removed at the end of the", "self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test", "self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout = 100 self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port", "keys are authorized with chronos. self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/']) # Ensure chronos has", "None)) # Ensure FileList returns files when files_from does not exist. files_from =", "test_label, test_args=None): \"\"\"Sets configurations necessary for running a chrome test. Args: test_exe: The", "path. self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must be an absolute path') def testParserErrorFiles(self): \"\"\"Verify we", "correct files.\"\"\" # Ensure FileList returns files when files_from is None. files =", "start with') def testParserErrorCWD(self): \"\"\"Verify we get parser errors when specifying the cwd.\"\"\"", "and ensure an # exception is not raised if it fails. self.assertCommandCalled( ['tast',", "testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from within the chroot.\"\"\" # Checks that", "'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests an autotest call with attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir", "the test. 
self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run device command call", "def testCatapult(self): \"\"\"Verify catapult test command.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/'", "in this instance. label_root = test_label.split(':')[0].lstrip('/') # A few files used by the", "disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for setup and creating a temp file path.\"\"\"", "exception check. with self.assertRaises(SystemExit): with outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self):", "if --results-src is not absolute. self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') # Parser error if no", "new VM is started before running tests.\"\"\" self._tester.start_vm = True self._tester.Run() # Check", "os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd, 'test_results') testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that',", "CheckParserError(self, args, error_msg): \"\"\"Checks that parser error is raised. Args: args: List of", "return os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility methods\"\"\" def testStartVM(self): \"\"\"Verify that", "chromite.lib import constants from chromite.lib import cros_test from chromite.lib import cros_test_lib from chromite.lib", "error if build directory is not an existing directory. 
self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not", "run device command call when giving a cwd.\"\"\" self._tester.remote_cmd = True self._tester.cwd =", "def testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy and chrome test commands using scp to copy.\"\"\"", "Verify that specifying the board gets the latest canary. self._tester.flash = True self._tester.public_image", "test command is provided. self.CheckParserError('--remote-cmd', 'specify test command') # Parser error if using", "self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome command with additional arguments.\"\"\" self._tester.deploy = True", "self._tester.Run() isrunning_mock.assert_called() # Run vm_sanity. self.assertCommandContains([ 'ssh', '-p', '9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ])", "'root@localhost', '--', 'cd /usr/local/chrome_test && su chronos -c -- ' '\"out_amd64-generic/Release/%s %s\"' %", "FileList uses 'files_from' and ignores 'files'. file_list = ['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list))", "chrome test. Args: test_exe: The name of the chrome test. test_label: The label", "# Ensure command is run with an env var for the build dir,", "self._tester._device.device + ':9222', '--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome command", "and --files-from are specified. self.CheckParserError(['--files', 'file_list', '--files-from', 'file'], '--files and --files-from') # Parser", "single tast test.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222',", "capture the cros_sdk wrapper. 
self._tester._RunAutotest() # Check that we enter the chroot before", "= '172.16.17.32' self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir =", "self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'),", "link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self):", "'cd /usr/local/chrome_test && su chronos -c -- ' '\"out_amd64-generic/Release/%s %s\"' % (test_exe, args)])", "os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility methods\"\"\" def testStartVM(self): \"\"\"Verify that a", "cwd.\"\"\" self._tester.remote_cmd = True self._tester.cwd = '/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run() # Ensure", "self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd %s", "a cwd.\"\"\" self._tester.remote_cmd = True self._tester.cwd = '/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run() #", "'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str = ('QEMU emulator version 2.6.0, Copyright (c) ' '2003-2008 <NAME>')", "expression.\"\"\" self._tester.tast = [ '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ] self._tester.Run()", "'9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ]) def 
testCatapult(self): \"\"\"Verify catapult test command.\"\"\" self._tester.catapult_tests =", "test_exe, test_label, test_args=None): \"\"\"Sets configurations necessary for running a chrome test. Args: test_exe:", "call.\"\"\" self._tester.remote_cmd = True self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos = True self._tester.args", "self._tester.chrome_test = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info", "\"dep:android\") && !flaky && !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running a", "Parser error when a file has a bad path. self.CheckParserError(['--files', '../some_file'], 'cannot start", "isrunning_mock.assert_called() # Run vm_sanity. self.assertCommandContains([ 'ssh', '-p', '9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ]) def", "the build dir, and ensure an # exception is not raised if it", "mock.patch.object(os, 'access', return_value=True): tester = cros_test.CrOSTest(opts) tester._device.use_sudo = False tester._device.board = 'amd64-generic' tester._device.image_path", "# Ensure files are being copied over to the device using scp. self.assertCommandContains(['scp',", "without a test command. self.CheckParserError('--as-chronos', 'as-chronos') # Parser error if there are args,", "chronos. 
self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/']) # Ensure chronos has ownership of the directory.", "CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test cases.\"\"\" def CheckParserError(self, args, error_msg): \"\"\"Checks that parser error", "an autotest from within the chroot.\"\"\" # Checks that mock version has been", "command should not change the working directory or create a temp directory on", "'test_results' self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None", "'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args = ' '.join(test_args) if test_args else '' # Ensure the", "def testBasic(self, isrunning_mock): \"\"\"Tests basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() # Run vm_sanity. self.assertCommandContains([ 'ssh',", "for. self.assertCommandContains(['gn', 'desc', build_dir, test_label, 'runtime_deps']) # Ensure UI is stopped so the", "self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board', 'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options',", "self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self): \"\"\"Verify that the build directory is set when not", "'12900.0.0', } self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify an xbuddy", "'--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd, 'test_results') testing_rsa_dir = os.path.join(cwd,", "testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', 
'172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True)", "'stop ui']) # Ensure a user activity ping is sent to the device.", "an env var for the build dir, and ensure an # exception is", "amd64-generic --no-quickmerge' \" --ssh_options '-F /dev/null -i /dev/null' localhost:9222\" ' accessibility_Sanity'), self.caplog.text) class", "is writeable to use sudo. with mock.patch.object(os, 'access', return_value=True): tester = cros_test.CrOSTest(opts) tester._device.use_sudo", "rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify build/deploy and chrome test commands when a test arg", "# Retrieves the build directory from the parsed options. build_dir = cros_test.ParseCommandLine( ['--chrome-test',", "= ['./bin/vm_sanity.py'] self._tester.Run() # Ensure command runs in the autotest directory. self.assertCommandContains('cd /usr/local/autotest", "'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests flash command is skipped when not needed.\"\"\"", "['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test", "'/tmp/dest_dir'], 'with results-dest-dir') # Parser error if results destination dir is a file.", "test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify parser errors for building/deploying Chrome.\"\"\" # Parser", "run command. This is necessary beacuse the mock doesn't # capture the cros_sdk", "error_msg: Error message to check for. \"\"\" # Recreate args as a list", "in the autotest directory. 
self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a", "outcap pytestmark = cros_test_lib.pytestmark_inside_only assert sys.version_info >= (3, 6), 'This module requires Python", "class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self, isrunning_mock): \"\"\"Tests basic", "False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes'] cwd =", "if the cwd refers to a parent path. self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot start", "outcap.OutputCapturer() before assertRaises(SystemExit) # swallows SystemExit exception check. with self.assertRaises(SystemExit): with outcap.OutputCapturer() as", "test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self, isrunning_mock): \"\"\"Tests basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() #", "self.assertEqual(file_list, cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self,", "'-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join( tast_cache_dir,", "\"\"\"Verify that we use the correct browser in guest mode.\"\"\" self._tester.catapult_tests = ['testAddResults']", "--files.\"\"\" # Parser error when both --files and --files-from are specified. 
self.CheckParserError(['--files', 'file_list',", "all tests.\"\"\" self._tester = self.createTester() def TempFilePath(self, file_path): \"\"\"Creates a temporary file path", "testCatapult(self): \"\"\"Verify catapult test command.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests',", "copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe,", "'/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run', '-build=false', '-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin'])", "running tests.\"\"\" self._tester.start_vm = True self._tester.Run() # Check if new VM got launched.", "SystemExit exception check. with self.assertRaises(SystemExit): with outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def", "'172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest", "built. test_args: Chrome test arguments. \"\"\" # Ensure chrome is being built. self.assertCommandContains(['autoninja',", "' '\"out_amd64-generic/Release/%s %s\"' % (test_exe, args)]) def testChromeTestRsync(self): \"\"\"Verify build/deploy and chrome test", "of this source code is governed by a BSD-style license that can be", "DUT. self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) # Ensure test ssh keys are authorized with chronos.", "a string. if isinstance(args, str): args = [args] # Putting outcap.OutputCapturer() before assertRaises(SystemExit)", "is sent to the device. 
self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'dbus-send', '--system', '--type=method_call',", "absolute path') def testParserErrorFiles(self): \"\"\"Verify we get parser errors with --files.\"\"\" # Parser", "True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True self._tester.mount = True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount'])", "test_label: The label of the chrome test. build_dir: The directory where chrome is", "CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for setup and creating a temp file path.\"\"\" def createTester(self,", "and --files-from') # Parser error when --files-from does not exist. self.CheckParserError(['--files-from', '/fake/file'], 'is", "the latest canary. self._tester.flash = True self._tester.public_image = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release", "Parser error if --results-src is not absolute. self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') # Parser error", "]) def testCatapult(self): \"\"\"Verify catapult test command.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python',", "'-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify running tast tests from", "'--browser=system-guest', 'testAddResults' ]) def testRunDeviceCmd(self): \"\"\"Verify a run device cmd call.\"\"\" self._tester.remote_cmd =", "'-F /dev/null -i /dev/null' localhost:9222\" ' accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test", "parent path. 
self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot start with ..') # Parser error if", "import mock import pytest # pylint: disable=import-error from chromite.lib import constants from chromite.lib", "self.assertRaises(SystemExit): with outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify we get", "['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None)) # Ensure FileList returns files when files_from does", "files/directories are copied from the DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir =", "of arguments of the particular chrome test. \"\"\" self._tester.args = [test_exe] + test_args", "self.assertEqual(files, cros_test.FileList(files, files_from)) # Ensure FileList uses 'files_from' and ignores 'files'. file_list =", "label of the chrome test. build_dir: The directory where chrome is built. test_args:", "'cannot start with ..') # Parser error when a non-existent file is passed", "that the runtime dependencies are checked for. self.assertCommandContains(['gn', 'desc', build_dir, test_label, 'runtime_deps']) #", "when src files are not specified. The remote command should not change the", "= 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains(", "are specified. self.CheckParserError(['--files', 'file_list', '--files-from', 'file'], '--files and --files-from') # Parser error when", "# Ensure target directory is created on the DUT. self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) #", "Parser error if the cwd is not an absolute path. 
self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd", "os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false', '-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr',", "# check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([", "= ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None)) # Ensure FileList returns files when files_from", "tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True self._tester.Run() # Check that we use", "['ssh', '-p', '12345', 'root@localhost', '--', 'true']) def testFlash(self): \"\"\"Tests flash command.\"\"\" # Verify", "module requires Python 3.6+' # pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for setup", "testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running a single tast test with various arguments.\"\"\" self._tester.tast =", "'--force', '--build-dir', self._tester.build_dir, '--process-timeout', '180', '--device', self._tester._device.device + ':9222', '--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir])", "= False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes'] cwd", "directory is removed at the end of the test. self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def", "results-src') # Parser error if no results source is given. self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with", "self._tester.Run() # Ensure target directory is created on the DUT. 
self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test'])", "is given. self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir') # Parser error if results destination dir", "Retrieves the build directory from the parsed options. build_dir = cros_test.ParseCommandLine( ['--chrome-test', '--',", "self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd %s && /usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'],", "Parser error if using chronos without a test command. self.CheckParserError('--as-chronos', 'as-chronos') # Parser", "'desc', test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe, test_label, build_dir, test_args=None): \"\"\"Checks to see that", "test ssh keys are authorized with chronos. self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/']) # Ensure", "file') # Parser error when a file in --files has an absolute path.", "from chromite.lib import cros_test from chromite.lib import cros_test_lib from chromite.lib import osutils from", "using rsync. 
self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self, rsync_mock): \"\"\"Verify", "= ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure target directory is created on the DUT.", "testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy and chrome test commands using scp to copy.\"\"\" test_exe", "= '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock):", "error if no build directory is specified. self.CheckParserError('--build', '--build-dir') # Parser error if", "mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest()", "Run vm_sanity. self.assertCommandContains([ 'ssh', '-p', '9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ]) def testCatapult(self): \"\"\"Verify", "where chrome is built. test_args: Chrome test arguments. \"\"\" # Ensure chrome is", "a simple autotest call.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.Run() # Check VM got launched.", "2.6.0, Copyright (c) ' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester def setUp(self): \"\"\"Common", "self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/']) # Ensure chronos has ownership of the directory. self.assertCommandContains(['chown',", "has a bad path. 
self.CheckParserError(['--files', '../some_file'], 'cannot start with ..') # Parser error", "'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self): \"\"\"Tests basic deploy chrome command.\"\"\" self._tester.deploy = True self._tester.build_dir", "from chromite.lib import constants from chromite.lib import cros_test from chromite.lib import cros_test_lib from", "chrome test commands using rsync to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:'", "'--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from outside", "Args: args: List of commandline arguments. error_msg: Error message to check for. \"\"\"", "self._tester.deploy = True self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' # test_label looks like", "def testParserSetsBuildDir(self): \"\"\"Verify that the build directory is set when not specified.\"\"\" test_dir", "filename, self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify that FileList returns the correct files.\"\"\" # Ensure", "\"\"\"Verify we get parser errors when specifying the cwd.\"\"\" # Parser error if", "be an absolute path') def testParserErrorFiles(self): \"\"\"Verify we get parser errors with --files.\"\"\"", "if isinstance(args, str): args = [args] # Putting outcap.OutputCapturer() before assertRaises(SystemExit) # swallows", "self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a remote command when src", "self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' % filename, self._tester.results_dest_dir]) 
def", "files_from = self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from)) # Ensure FileList uses 'files_from' and ignores", "# Ensure that the runtime dependencies are checked for. self.assertCommandContains(['gn', 'desc', build_dir, test_label,", "for the duration of a test.\"\"\" return os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous", "src/dest directories.\"\"\" # Parser error if --results-src is not absolute. self.CheckParserError(['--results-src', 'tmp/results'], 'absolute')", "'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false', '-waituntilready',", "} }\"\"\" % (test_exe, test_label), makedirs=True) self._tester.build = True self._tester.deploy = True self._tester.chrome_test_target", "commands using rsync to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe", "\"\"\"Verify a run device cmd call.\"\"\" self._tester.remote_cmd = True self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0],", "self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self): \"\"\"Verify that results files/directories are copied from the DUT.\"\"\"", "use sudo. with mock.patch.object(os, 'access', return_value=True): tester = cros_test.CrOSTest(opts) tester._device.use_sudo = False tester._device.board", "a temp directory on the target. 
\"\"\" self._tester.remote_cmd = True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py']", "'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test", "True self._tester.public_image = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', }", "filename in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' % filename, self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify that FileList", "check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run', '-build=false', '-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self):", "a file. filename = '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename], 'existing file') def", "/dev/null', '172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an", "to check for. \"\"\" # Recreate args as a list if it is", "testParserErrorCommands(self): \"\"\"Verify we get parser errors when using certain commands.\"\"\" # Parser error", "'/etc/lsb-release'], 'should be a relative path') # Parser error when a file has", "A list of arguments of the particular chrome test. 
\"\"\" self._tester.args = [test_exe]", "+ ':9222', '--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome command with", "emulator version 2.6.0, Copyright (c) ' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester def", "\"\"\"Verify running a single tast test.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false',", "a bad path. self.CheckParserError(['--files', '../some_file'], 'cannot start with ..') # Parser error when", "'/root/.ssh/', '/home/chronos/user/']) # Ensure chronos has ownership of the directory. self.assertCommandContains(['chown', '-R', 'chronos:',", "can grab the GPU if needed. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'stop ui'])", "Ensure command runs in the autotest directory. self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self):", "self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args", "if no results destination dir is given. self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src') # Parser", "test. test_label: The label of the chrome test. build_dir: The directory where chrome", "chrome test. test_label: The label of the chrome test. 
test_args: A list of", "self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True self._tester.mount = True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self): \"\"\"Verify", "return_value=False) def testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy and chrome test commands using scp to", "files = ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None)) # Ensure FileList returns files when", "few files used by the chrome test. runtime_deps = [ './%s' % test_exe,", "name of the chrome test. test_label: The label of the chrome test. build_dir:", "print_function import os import sys import mock import pytest # pylint: disable=import-error from", "'absolute') # Parser error if no results destination dir is given. self.CheckParserError(['--results-src', '/tmp/results'],", "self._tester = self.createTester() def TempFilePath(self, file_path): \"\"\"Creates a temporary file path lasting for", "'-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify running a set of tast tests", "= True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True self._tester.mount = True self._tester.Run() self.assertCommandContains(['--nostrip',", "that had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results'", "ensure an # exception is not raised if it fails. self.assertCommandCalled( ['tast', 'run',", "return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from outside the chroot.\"\"\" #", "if no test command is provided. 
self.CheckParserError('--remote-cmd', 'specify test command') # Parser error", "self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir, '--process-timeout', '180', '--device', self._tester._device.device + ':9222', '--board',", "bad path. self.CheckParserError(['--files', '../some_file'], 'cannot start with ..') # Parser error when a", "a set of tast tests with an expression.\"\"\" self._tester.tast = [ '((\"dep:chrome\" ||", "self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source',", "self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir, '--process-timeout', '180', '--device', self._tester._device.device +", "Parser error if the cwd refers to a parent path. self.CheckParserError(['--cwd', '../new_cwd'], 'cwd", "testAutotestWithArgs(self): \"\"\"Tests an autotest call with attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = 'test_results'", "'-verbose', 'run', '-build=false', '-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify running", "error if the cwd is not an absolute path. self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must", "import cros_test from chromite.lib import cros_test_lib from chromite.lib import osutils from chromite.lib import", "output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify we get a parser error for --chrome-test when no", "requires Python 3.6+' # pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for setup and", "not absolute. 
self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') # Parser error if no results destination dir", "= self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from)) # Ensure FileList uses 'files_from' and ignores 'files'.", "is not an existing directory. self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not a directory') def testParserErrorResultsSrc(self):", "def testSingleBaseTastTest(self): \"\"\"Verify running a single tast test.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast',", "'-p', '9222', 'root@localhost', '--', 'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args =", "' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester def setUp(self): \"\"\"Common set up method", "'/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify running tast tests from the SimpleChrome SDK.\"\"\"", "arg is given.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe test_args =", "['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename in self._tester.results_src: self.assertCommandContains(['scp',", "' accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify running", "def testFetchResults(self): \"\"\"Verify that results files/directories are copied from the DUT.\"\"\" self._tester.results_src =", "sys.version_info >= (3, 6), 'This module requires Python 3.6+' # pylint: disable=protected-access class", "Parser error if no test command is provided. 
self.CheckParserError('--remote-cmd', 'specify test command') #", "createTester(self, opts=None): \"\"\"Builds a CrOSTest suitable for testing. Args: opts: Cmd-line args to", "called. # TODO(crbug/1065172): Invalid assertion that had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest", "self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' % filename, self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify that FileList returns the", "osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True)", "= '/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir)", "the parsed options. build_dir = cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self):", "that results files/directories are copied from the DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results']", "latest canary. self._tester.flash = True self._tester.public_image = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release =", "'--results-dest-dir', filename], 'existing file') def testParserErrorCommands(self): \"\"\"Verify we get parser errors when using", "setup and creating a temp file path.\"\"\" def createTester(self, opts=None): \"\"\"Builds a CrOSTest", "# We check if /dev/kvm is writeable to use sudo. 
with mock.patch.object(os, 'access',", "VM is started before running tests.\"\"\" self._tester.start_vm = True self._tester.Run() # Check if", "test commands using rsync to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' +", "from chromite.lib import osutils from chromite.lib import partial_mock from chromite.scripts import cros_set_lsb_release from", "errors for results src/dest directories.\"\"\" # Parser error if --results-src is not absolute.", "not change the working directory or create a temp directory on the target.", "self._tester.build_dir = '/some/chromium/dir' self._tester.args = ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() # Ensure command", "to be an executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep),", "Parser error when --files-from does not exist. self.CheckParserError(['--files-from', '/fake/file'], 'is not a file')", "has an absolute path. self.CheckParserError(['--files', '/etc/lsb-release'], 'should be a relative path') # Parser", "BSD-style license that can be # found in the LICENSE file. \"\"\"Unit tests", "'--build-dir') # Parser error if build directory is not an existing directory. 
self.CheckParserError(['--deploy',", "'-build=false', '-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify running tast tests", "the DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for", "running tast tests from the SimpleChrome SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa'", "cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify running a single tast test.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.Run()", "opts else []) opts.enable_kvm = True # We check if /dev/kvm is writeable", "# Checks that mock version has been called. # TODO(crbug/1065172): Invalid assertion that", "autotest from outside the chroot.\"\"\" # Checks that mock version has been called.", "for setup and creating a temp file path.\"\"\" def createTester(self, opts=None): \"\"\"Builds a", "{ \"label\": \"%s\", \"type\": \"console_test_launcher\", } }\"\"\" % (test_exe, test_label), makedirs=True) self._tester.build =", "sys import mock import pytest # pylint: disable=import-error from chromite.lib import constants from", "testParserErrorResultsSrc(self): \"\"\"Verify parser errors for results src/dest directories.\"\"\" # Parser error if --results-src", "not exist. self.CheckParserError(['--files-from', '/fake/file'], 'is not a file') # Parser error when a", "directory on the target. 
\"\"\" self._tester.remote_cmd = True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh',", "def testStartVM(self): \"\"\"Verify that a new VM is started before running tests.\"\"\" self._tester.start_vm", "'-p', '9222', 'root@localhost', '--', 'true']) def testStartVMCustomPort(self): \"\"\"Verify that a custom SSH port", "\"\"\"Tests an autotest call with attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = 'test_results' self._tester._device.private_key", "\"\"\"Base class for setup and creating a temp file path.\"\"\" def createTester(self, opts=None):", "testChromeTestExeArg(self): \"\"\"Verify build/deploy and chrome test commands when a test arg is given.\"\"\"", "chromite.lib import cros_test from chromite.lib import cros_test_lib from chromite.lib import osutils from chromite.lib", "test_that. self.assertIn(('cros_sdk -- test_that --board amd64-generic --no-quickmerge' \" --ssh_options '-F /dev/null -i /dev/null'", "self._tester.remote_cmd = True self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos = True self._tester.args =", "filename], 'existing file') def testParserErrorCommands(self): \"\"\"Verify we get parser errors when using certain", "with mock.patch.object(os, 'access', return_value=True): tester = cros_test.CrOSTest(opts) tester._device.use_sudo = False tester._device.board = 'amd64-generic'", "self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test']) # Ensure command runs in the target directory. self.assertCommandContains('cd", "target. 
\"\"\" self._tester.remote_cmd = True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py'])", "'should be a relative path') # Parser error when a file has a", "self.assertCommandContains([ 'tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" || \"dep:android\") && !flaky &&", "the custom port when talking to the VM. self.assertCommandContains( ['ssh', '-p', '12345', 'root@localhost',", "'--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', '172.16.17.32',", "_check_inside_chroot_mock): \"\"\"Tests running an autotest from within the chroot.\"\"\" # Checks that mock", "test_args else [test_exe] self._tester.chrome_test = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl')", "'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify running a set of tast tests with an", "directory is specified. self.CheckParserError('--build', '--build-dir') # Parser error if build directory is not", "be a relative path') # Parser error when a file has a bad", "\"\"\"Tests flash command.\"\"\" # Verify that specifying the board gets the latest canary.", "the chrome test. test_args: A list of arguments of the particular chrome test.", "that parser error is raised. Args: args: List of commandline arguments. error_msg: Error", "VM is responsive. 
self.assertCommandContains( ['ssh', '-p', '9222', 'root@localhost', '--', 'true']) def testStartVMCustomPort(self): \"\"\"Verify", "'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self): \"\"\"Tests basic deploy chrome command.\"\"\" self._tester.deploy", "100 self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32'", "\"label\": \"%s\", \"type\": \"console_test_launcher\", } }\"\"\" % (test_exe, test_label), makedirs=True) self._tester.build = True", "for CrOSTest.\"\"\" from __future__ import print_function import os import sys import mock import", "env var for the build dir, and ensure an # exception is not", "code is governed by a BSD-style license that can be # found in", "the working directory or create a temp directory on the target. \"\"\" self._tester.remote_cmd", "the cwd refers to a parent path. self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot start with", "self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the build directory from the parsed options. build_dir = cros_test.ParseCommandLine(", "isrunning_mock): \"\"\"Tests basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() # Run vm_sanity. self.assertCommandContains([ 'ssh', '-p', '9222',", "args don't start with --. 
self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must start with') def testParserErrorCWD(self):", "errors with --files.\"\"\" # Parser error when both --files and --files-from are specified.", "cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self, isrunning_mock): \"\"\"Tests basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() # Run", "'--', '/usr/local/autotest/bin/vm_sanity.py' ]) def testCatapult(self): \"\"\"Verify catapult test command.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.Run()", "'access', return_value=True): tester = cros_test.CrOSTest(opts) tester._device.use_sudo = False tester._device.board = 'amd64-generic' tester._device.image_path =", "'/home/chronos/user/']) # Ensure chronos has ownership of the directory. self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test'])", "to the device using rsync. self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def", "Creates the test_exe to be an executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for dep in", "'cwd must be an absolute path') def testParserErrorFiles(self): \"\"\"Verify we get parser errors", "'/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd %s && /usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd], expected=False) self.assertCommandContains(['rm',", "--files has an absolute path. self.CheckParserError(['--files', '/etc/lsb-release'], 'should be a relative path') #", "are checked for. self.assertCommandContains(['gn', 'desc', build_dir, test_label, 'runtime_deps']) # Ensure UI is stopped", "call.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.Run() # Check VM got launched. 
self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) #", "build/deploy and chrome test commands when a test arg is given.\"\"\" test_exe =", "self._tester._RunAutotest() # Check that we enter the chroot before running test_that. self.assertIn(('cros_sdk --", "parsed options. build_dir = cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify", "self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must be an absolute path') def testParserErrorFiles(self): \"\"\"Verify we get", "that specifying the board gets the latest canary. self._tester.flash = True self._tester.public_image =", "def testParserErrorCWD(self): \"\"\"Verify we get parser errors when specifying the cwd.\"\"\" # Parser", "'%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy and chrome", "commands ran properly. Args: test_exe: The name of the chrome test. test_label: The", "the particular chrome test. \"\"\" self._tester.args = [test_exe] + test_args if test_args else", "not an absolute path. self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must be an absolute path') def", "build_dir, test_args=None): \"\"\"Checks to see that chrome test commands ran properly. Args: test_exe:", "directory. self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests ' '--test-launcher-print-test-stdio=always') # Ensure target directory is removed", "testStartVMCustomPort(self): \"\"\"Verify that a custom SSH port is supported for tests.\"\"\" self._tester =", "within the chroot.\"\"\" # Checks that mock version has been called. 
# TODO(crbug/1065172):", "'-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def", "'-C', build_dir, test_exe]) # Ensure that the runtime dependencies are checked for. self.assertCommandContains(['gn',", "both --files and --files-from are specified. self.CheckParserError(['--files', 'file_list', '--files-from', 'file'], '--files and --files-from')", "= None self._tester._device.device = '172.16.17.32' self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(),", "'--test-launcher-print-test-stdio=always') # Ensure target directory is removed at the end of the test.", "cros_test.CrOSTest. \"\"\" opts = cros_test.ParseCommandLine(opts if opts else []) opts.enable_kvm = True #", "when a non-existent file is passed to --files. self.CheckParserError(['--files', 'fake/file'], 'does not exist')", "self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True self._tester.Run() # Check that we use the", "'/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename], 'existing file') def testParserErrorCommands(self): \"\"\"Verify we get", "if opts else []) opts.enable_kvm = True # We check if /dev/kvm is", "directory. self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not a directory') def testParserErrorResultsSrc(self): \"\"\"Verify parser errors for", "an existing directory. 
self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not a directory') def testParserErrorResultsSrc(self): \"\"\"Verify parser", "% os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr',", "Ensure FileList returns files when files_from does not exist. files_from = self.TempFilePath('file_list') self.assertEqual(files,", "'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self): \"\"\"Tests basic deploy chrome command.\"\"\" self._tester.deploy = True", "testParserErrorFiles(self): \"\"\"Verify we get parser errors with --files.\"\"\" # Parser error when both", "'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true',", "return tester def setUp(self): \"\"\"Common set up method for all tests.\"\"\" self._tester =", "'test_that', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', 'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests an", "'--device', self._tester._device.device + ':9222', '--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome", "# -*- coding: utf-8 -*- # Copyright 2019 The Chromium OS Authors. All", "cros_test.FileList(files, None)) # Ensure FileList returns files when files_from does not exist. 
files_from", "testRunDeviceCmd(self): \"\"\"Verify a run device cmd call.\"\"\" self._tester.remote_cmd = True self._tester.files = [self.TempFilePath('crypto_unittests')]", "self.assertCommandCalled( ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure that --host-cmd", "# Parser error when a file in --files has an absolute path. self.CheckParserError(['--files',", "destination dir is given. self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src') # Parser error if no", "custom port when talking to the VM. self.assertCommandContains( ['ssh', '-p', '12345', 'root@localhost', '--',", "\"\"\"Verify running a host command.\"\"\" self._tester.host_cmd = True self._tester.build_dir = '/some/chromium/dir' self._tester.args =", "the chroot.\"\"\" # Checks that mock version has been called. # TODO(crbug/1065172): Invalid", "target directory is removed at the end of the test. self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test'])", "# Specify an xbuddy link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash',", "# Ensure command runs in the target directory. self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests '", "'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure that --host-cmd does not invoke", "'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests flash command is skipped when not needed.\"\"\" self._tester.flash =", "there are args, but no command. self.CheckParserError('--some_test some_command', '--remote-cmd or --host-cmd or --chrome-test')", "for results src/dest directories.\"\"\" # Parser error if --results-src is not absolute. self.CheckParserError(['--results-src',", "test. 
test_args: A list of arguments of the particular chrome test. \"\"\" self._tester.args", "message to check for. \"\"\" # Recreate args as a list if it", "# Check that we use the custom port when talking to the VM.", "self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run', '-build=false', '-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def", "path') # Parser error when a file has a bad path. self.CheckParserError(['--files', '../some_file'],", "'/usr/local/chrome_test' # test_label looks like //crypto:crypto_unittests. # label_root extracts 'crypto' from the test_label", "'-r', '/root/.ssh/', '/home/chronos/user/']) # Ensure chronos has ownership of the directory. self.assertCommandContains(['chown', '-R',", "that mock version has been called. # TODO(crbug/1065172): Invalid assertion that had previously", "suitable for testing. Args: opts: Cmd-line args to cros_test used to build a", "cros_test.CrOSTest(opts) tester._device.use_sudo = False tester._device.board = 'amd64-generic' tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str", "&& ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a remote command when src files are", "os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify an xbuddy link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0'", "testBasicAutotest(self): \"\"\"Tests a simple autotest call.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.Run() # Check VM", "self._tester._device.device = '172.16.17.32' self._tester.results_dir = '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run', '-build=false', '-waituntilready',", "of commandline arguments. 
error_msg: Error message to check for. \"\"\" # Recreate args", "'--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest", "DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename", "self.assertCommandContains(['cd %s && /usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'], expected=False) def testHostCmd(self):", "'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.results_dir = '/tmp/results'", "correct browser in guest mode.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.guest = True self._tester.Run() self.assertCommandContains([", "'/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ]) def testCatapultAsGuest(self): \"\"\"Verify that we use the correct", "results source is given. self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir') # Parser error if results", "class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests a simple autotest call.\"\"\"", "enter the chroot before running test_that. self.assertIn(('cros_sdk -- test_that --board amd64-generic --no-quickmerge' \"", "'cwd cannot start with ..') # Parser error if the cwd is not", "&& crypto_unittests ' '--test-launcher-print-test-stdio=always') # Ensure target directory is removed at the end", "fails. 
self.assertCommandCalled( ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure that", "error_msg): \"\"\"Checks that parser error is raised. Args: args: List of commandline arguments.", "'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222',", "'-p', '12345', 'root@localhost', '--', 'true']) def testFlash(self): \"\"\"Tests flash command.\"\"\" # Verify that", "# Capture the run command. This is necessary beacuse the mock doesn't #", "cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'),", "'run'], 'must start with') def testParserErrorCWD(self): \"\"\"Verify we get parser errors when specifying", "'/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s'", "directory is not an existing directory. self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not a directory') def", "end of the test. self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run device", "'file'], '--files and --files-from') # Parser error when --files-from does not exist. self.CheckParserError(['--files-from',", "VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks that autotest is running. 
self.assertCommandContains([ 'test_that',", "use the custom port when talking to the VM. self.assertCommandContains( ['ssh', '-p', '12345',", "'/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning',", "self._tester.Run() # Check that we use the custom port when talking to the", "# Ensure a user activity ping is sent to the device. self.assertCommandContains(['ssh', '-p',", "is created on the DUT. self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) # Ensure test ssh keys", "and chrome test commands using rsync to copy.\"\"\" test_exe = 'crypto_unittests' test_label =", "chronos without a test command. self.CheckParserError('--as-chronos', 'as-chronos') # Parser error if there are", "--board amd64-generic --no-quickmerge' \" --ssh_options '-F /dev/null -i /dev/null' localhost:9222\" ' accessibility_Sanity'), self.caplog.text)", "'\"out_amd64-generic/Release/%s %s\"' % (test_exe, args)]) def testChromeTestRsync(self): \"\"\"Verify build/deploy and chrome test commands", "self._tester.catapult_tests = ['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ]) def testCatapultAsGuest(self):", "coding: utf-8 -*- # Copyright 2019 The Chromium OS Authors. All rights reserved.", "autotest from within the chroot.\"\"\" # Checks that mock version has been called.", "if /dev/kvm is writeable to use sudo. 
with mock.patch.object(os, 'access', return_value=True): tester =", "osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) # Mocks the", "are copied from the DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir')", "'/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args = ' '.join(test_args) if test_args else '' # Ensure", "port is supported for tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True self._tester.Run() #", "'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from", "= ['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir =", "scp to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label)", "on the host. self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test cases.\"\"\"", "self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify build/deploy and chrome test", "checked for. 
self.assertCommandContains(['gn', 'desc', build_dir, test_label, 'runtime_deps']) # Ensure UI is stopped so", "test_that --board amd64-generic --no-quickmerge' \" --ssh_options '-F /dev/null -i /dev/null' localhost:9222\" ' accessibility_Sanity'),", "'-p', '/usr/local/cros_test']) # Ensure test ssh keys are authorized with chronos. self.assertCommandContains(['cp', '-r',", "# Ensure test ssh keys are authorized with chronos. self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/'])", "before running test_that. self.assertIn(('cros_sdk -- test_that --board amd64-generic --no-quickmerge' \" --ssh_options '-F /dev/null", "!flaky && !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running a single tast", "test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe test_args = ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label,", "building/deploying Chrome.\"\"\" # Parser error if no build directory is specified. self.CheckParserError('--build', '--build-dir')", "assertion that had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] # Capture", "'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ]) def testCatapultAsGuest(self): \"\"\"Verify that we use the", "test. \"\"\" self._tester.args = [test_exe] + test_args if test_args else [test_exe] self._tester.chrome_test =", "'with results-src') # Parser error if no results source is given. self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'],", "rights reserved. 
# Use of this source code is governed by a BSD-style", "self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = 'test_results' self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level = 'debug' self._tester._device.should_start_vm", "'.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board', 'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge',", "no test command is provided. self.CheckParserError('--remote-cmd', 'specify test command') # Parser error if", "runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) # Mocks the output by providing necessary runtime files.", "the test can grab the GPU if needed. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--',", "# capture the cros_sdk wrapper. self._tester._RunAutotest() # Check that we enter the chroot", "of tast tests with an expression.\"\"\" self._tester.tast = [ '((\"dep:chrome\" || \"dep:android\") &&", "a run device cmd call.\"\"\" self._tester.remote_cmd = True self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700)", "is run. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'cd /usr/local/chrome_test && su chronos -c", "to the isolate map. osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": { \"label\": \"%s\", \"type\": \"console_test_launcher\", }", "'172.16.17.32' self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd,", "commandline arguments. error_msg: Error message to check for. 
\"\"\" # Recreate args as", "-i /dev/null', 'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests an autotest call with attributes.\"\"\" self._tester.autotest", "Ensure that the runtime dependencies are checked for. self.assertCommandContains(['gn', 'desc', build_dir, test_label, 'runtime_deps'])", "'-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run device command call when giving a", "version_str = ('QEMU emulator version 2.6.0, Copyright (c) ' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str)", "needed. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'stop ui']) # Ensure a user activity", "--no-quickmerge' \" --ssh_options '-F /dev/null -i /dev/null' localhost:9222\" ' accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase):", "test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) # Ensure files are being copied", "self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests", "test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe, test_exe), '../../third_party/chromite'] # Creates the test_exe to be", "test commands using scp to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' +", "is not an absolute path. self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must be an absolute path')", "self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify an xbuddy link. self._tester.xbuddy =", "check for. \"\"\" # Recreate args as a list if it is given", "given. 
self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src') # Parser error if no results source is", "chromite.lib import cros_test_lib from chromite.lib import osutils from chromite.lib import partial_mock from chromite.scripts", "'--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args = ' '.join(test_args) if test_args else ''", "--. self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must start with') def testParserErrorCWD(self): \"\"\"Verify we get parser", "'172.16.17.32' self._tester.results_dir = '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run', '-build=false', '-waituntilready', '-timeout=100', '-resultsdir',", "cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self, isrunning_mock):", "self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes']", "directory. self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test']) # Ensure command runs in the target directory.", "running a single tast test.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready',", "destination dir is a file. 
filename = '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename],", "is given.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe test_args = ['--test-launcher-print-test-stdio=auto']", "os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false', '-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir,", "to see that chrome test commands ran properly. Args: test_exe: The name of", "Chromium OS Authors. All rights reserved. # Use of this source code is", "specified. self.CheckParserError('--build', '--build-dir') # Parser error if build directory is not an existing", "def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run device command call when giving a cwd.\"\"\" self._tester.remote_cmd", "6), 'This module requires Python 3.6+' # pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class", "= os.path.join(cwd, 'test_results') testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board', 'amd64-generic', '--results_dir',", "= True # We check if /dev/kvm is writeable to use sudo. 
with", "test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label,", "chrome test test cases.\"\"\" def SetUpChromeTest(self, test_exe, test_label, test_args=None): \"\"\"Sets configurations necessary for", "['ui.ChromeLogin'] self._tester.test_timeout = 100 self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None", "self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) # Ensure files are being copied over to the device", "'../../third_party/chromite'] # Creates the test_exe to be an executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for", "'-p', '9222', 'root@localhost', '--', 'cd /usr/local/chrome_test && su chronos -c -- ' '\"out_amd64-generic/Release/%s", "no results source is given. self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir') # Parser error if", "chrome test to the isolate map. osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": { \"label\": \"%s\", \"type\":", "providing necessary runtime files. self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe, test_label,", "self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check if new VM is responsive. self.assertCommandContains( ['ssh', '-p', '9222',", "is not absolute. self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') # Parser error if no results destination", "device using scp. 
self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify build/deploy", "chromite.lib import partial_mock from chromite.scripts import cros_set_lsb_release from chromite.utils import outcap pytestmark =", "test. runtime_deps = [ './%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe, test_exe), '../../third_party/chromite']", "chrome is built. test_args: Chrome test arguments. \"\"\" # Ensure chrome is being", "Parser error if no results source is given. self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir') #", "remote command when src files are not specified. The remote command should not", "with chronos. self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/']) # Ensure chronos has ownership of the", "self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self): \"\"\"Tests basic deploy", "'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome", "used by the chrome test. runtime_deps = [ './%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' %", "removed at the end of the test. self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify", "'-p', '9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ]) def testCatapult(self): \"\"\"Verify catapult test command.\"\"\" self._tester.catapult_tests", "the cros_sdk wrapper. 
self._tester._RunAutotest() # Check that we enter the chroot before running", "def testFlash(self): \"\"\"Tests flash command.\"\"\" # Verify that specifying the board gets the", "'/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run()", "def testCatapultAsGuest(self): \"\"\"Verify that we use the correct browser in guest mode.\"\"\" self._tester.catapult_tests", "sent to the device. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager',", "\"\"\"Verify we get a parser error for --chrome-test when no args are given.\"\"\"", "test cases.\"\"\" def CheckParserError(self, args, error_msg): \"\"\"Checks that parser error is raised. Args:", "'/fake/file'], 'is not a file') # Parser error when a file in --files", "arguments. error_msg: Error message to check for. \"\"\" # Recreate args as a", "raised. Args: args: List of commandline arguments. error_msg: Error message to check for.", "osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false', '-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s'", "test_exe to be an executable. 
osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir,", "self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename], 'existing file') def testParserErrorCommands(self): \"\"\"Verify we get parser errors", "return_value=True) def testBasic(self, isrunning_mock): \"\"\"Tests basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() # Run vm_sanity. self.assertCommandContains([", "self._tester.guest = True self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ]) def testRunDeviceCmd(self):", "def testChromeTestExeArg(self): \"\"\"Verify build/deploy and chrome test commands when a test arg is", "self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ]) def testRunDeviceCmd(self): \"\"\"Verify a run device", "the cwd is not an absolute path. self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must be an", "self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'], expected=False) def testHostCmd(self): \"\"\"Verify running a host command.\"\"\" self._tester.host_cmd", "chroot before running test_that. self.assertIn(('cros_sdk -- test_that --board amd64-generic --no-quickmerge' \" --ssh_options '-F", "when --files-from does not exist. self.CheckParserError(['--files-from', '/fake/file'], 'is not a file') # Parser", "\"\"\"Tests a simple autotest call.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.Run() # Check VM got", "Args: opts: Cmd-line args to cros_test used to build a CrOSTest. 
Returns: An", "browser in guest mode.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.guest = True self._tester.Run() self.assertCommandContains([ 'python',", "in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) # Mocks the output by providing necessary runtime", "to the device. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager',", "self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir, '--process-timeout', '180',", "= 100 self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device =", "error when a file in --files has an absolute path. self.CheckParserError(['--files', '/etc/lsb-release'], 'should", "= True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False)", "Chrome.\"\"\" # Parser error if no build directory is specified. 
self.CheckParserError('--build', '--build-dir') #", "command.\"\"\" self._tester.host_cmd = True self._tester.build_dir = '/some/chromium/dir' self._tester.args = ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin']", "self._tester.catapult_tests = ['testAddResults'] self._tester.guest = True self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults'", "constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd, 'test_results') testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board',", "test_label in this instance. label_root = test_label.split(':')[0].lstrip('/') # A few files used by", "to cros_test used to build a CrOSTest. Returns: An instance of cros_test.CrOSTest. \"\"\"", "talking to the VM. self.assertCommandContains( ['ssh', '-p', '12345', 'root@localhost', '--', 'true']) def testFlash(self):", "runs in the autotest directory. self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running", "command runs in the autotest directory. self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify", "with outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify we get a", "args = [args] # Putting outcap.OutputCapturer() before assertRaises(SystemExit) # swallows SystemExit exception check.", "CrOSTest suitable for testing. Args: opts: Cmd-line args to cros_test used to build", "temp file path.\"\"\" def createTester(self, opts=None): \"\"\"Builds a CrOSTest suitable for testing. 
Args:", "expected=False) self.assertCommandContains(['cd %s && /usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'], expected=False) def", "chrome test commands using scp to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:'", "absolute path. self.CheckParserError(['--files', '/etc/lsb-release'], 'should be a relative path') # Parser error when", "the board gets the latest canary. self._tester.flash = True self._tester.public_image = True self._tester._device.board", "list if it is given as a string. if isinstance(args, str): args =", "'-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify running a set of tast tests with", "of the particular chrome test. \"\"\" self._tester.args = [test_exe] + test_args if test_args", "'/tmp/results', '--results-dest-dir', filename], 'existing file') def testParserErrorCommands(self): \"\"\"Verify we get parser errors when", "# Ensure FileList uses 'files_from' and ignores 'files'. file_list = ['/tmp/file1', '/tmp/file2', '/tmp/file3']", "the mock doesn't # capture the cros_sdk wrapper. self._tester._RunAutotest() # Check that we", "not specified. The remote command should not change the working directory or create", "of cros_test.CrOSTest. \"\"\" opts = cros_test.ParseCommandLine(opts if opts else []) opts.enable_kvm = True", "for --chrome-test when no args are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self): \"\"\"Verify that", "# Parser error if the cwd is not an absolute path. self.CheckParserError(['--cwd', 'tmp/cwd'],", "build/deploy and chrome test commands using rsync to copy.\"\"\" test_exe = 'crypto_unittests' test_label", "\"\"\"Verify that FileList returns the correct files.\"\"\" # Ensure FileList returns files when", "is not raised if it fails. 
self.assertCommandCalled( ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False,", "returns files when files_from is None. files = ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None))", "= { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash',", "new VM is responsive. self.assertCommandContains( ['ssh', '-p', '9222', 'root@localhost', '--', 'true']) def testStartVMCustomPort(self):", "\"\"\"Common set up method for all tests.\"\"\" self._tester = self.createTester() def TempFilePath(self, file_path):", "'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self): \"\"\"Tests basic", "does not exist. self.CheckParserError(['--files-from', '/fake/file'], 'is not a file') # Parser error when", "a file has a bad path. self.CheckParserError(['--files', '../some_file'], 'cannot start with ..') #", "output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe, test_label, build_dir, test_args=None): \"\"\"Checks to see that chrome test", "output by providing necessary runtime files. self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self,", "self._tester.Run() for filename in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' % filename, self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify", "scp. self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify build/deploy and chrome", "command. 
self.CheckParserError('--as-chronos', 'as-chronos') # Parser error if there are args, but no command.", "self.assertCommandContains(['rm', '-rf'], expected=False) def testHostCmd(self): \"\"\"Verify running a host command.\"\"\" self._tester.host_cmd = True", "'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure that --host-cmd does not invoke ssh", "self.assertCommandContains([ 'ssh', '-p', '9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ]) def testCatapult(self): \"\"\"Verify catapult test", "'--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', '172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True)", "that we enter the chroot before running test_that. self.assertIn(('cros_sdk -- test_that --board amd64-generic", "= self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the build directory from the parsed options. build_dir =", "test test cases.\"\"\" def SetUpChromeTest(self, test_exe, test_label, test_args=None): \"\"\"Sets configurations necessary for running", "if needed. 
self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'stop ui']) # Ensure a user", "= True self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure target directory is created", "%s && /usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'], expected=False) def testHostCmd(self): \"\"\"Verify", "def testTastTestSDK(self): \"\"\"Verify running tast tests from the SimpleChrome SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin']", "% (test_exe, test_label), makedirs=True) self._tester.build = True self._tester.deploy = True self._tester.chrome_test_target = test_exe", "self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify parser errors for building/deploying Chrome.\"\"\" # Parser error", "testSingleBaseTastTest(self): \"\"\"Verify running a single tast test.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run',", "build_dir, test_label, 'runtime_deps']) # Ensure UI is stopped so the test can grab", "|| \"dep:android\") && !flaky && !disabled)' ] self._tester.Run() self.assertCommandContains([ 'tast', 'run', '-build=false', '-waituntilready',", "chrome test. test_args: A list of arguments of the particular chrome test. \"\"\"", "= True self._tester.cwd = '/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run() # Ensure command runs", "# swallows SystemExit exception check. 
with self.assertRaises(SystemExit): with outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg,", "True self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos = True self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always']", "self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir", "get parser errors when specifying the cwd.\"\"\" # Parser error if the cwd", "file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility methods\"\"\" def testStartVM(self): \"\"\"Verify that a new", "= ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename in self._tester.results_src:", "test_results_dir = os.path.join(cwd, 'test_results') testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board', 'amd64-generic',", "['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser", "an absolute path') def testParserErrorFiles(self): \"\"\"Verify we get parser errors with --files.\"\"\" #", "error if no test command is provided. self.CheckParserError('--remote-cmd', 'specify test command') # Parser", "no args are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self): \"\"\"Verify that the build directory", "is running. 
self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', 'localhost:9222', 'accessibility_Sanity']) def", "= self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir, '--process-timeout', '180', '--device', self._tester._device.device + ':9222',", "'-build=false', '-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' %", "def CheckParserError(self, args, error_msg): \"\"\"Checks that parser error is raised. Args: args: List", "self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src') # Parser error if no results source is given.", "deploy chrome command.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir',", "a CrOSTest. Returns: An instance of cros_test.CrOSTest. \"\"\" opts = cros_test.ParseCommandLine(opts if opts", "'%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify build/deploy and chrome test commands", "\"type\": \"console_test_launcher\", } }\"\"\" % (test_exe, test_label), makedirs=True) self._tester.build = True self._tester.deploy =", "'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify running tast tests from the SimpleChrome SDK.\"\"\" self._tester.tast =", "pytestmark = cros_test_lib.pytestmark_inside_only assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'", "using chronos without a test command. 
self.CheckParserError('--as-chronos', 'as-chronos') # Parser error if there", "' '.join(test_args) if test_args else '' # Ensure the chrome test is run.", "]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test test cases.\"\"\" def SetUpChromeTest(self, test_exe, test_label, test_args=None):", "no build directory is specified. self.CheckParserError('--build', '--build-dir') # Parser error if build directory", "None. files = ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None)) # Ensure FileList returns files", "\"\"\"Checks to see that chrome test commands ran properly. Args: test_exe: The name", "['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir,", "if test_args else '' # Ensure the chrome test is run. self.assertCommandContains(['ssh', '-p',", "def testRunDeviceCmd(self): \"\"\"Verify a run device cmd call.\"\"\" self._tester.remote_cmd = True self._tester.files =", "= True self._tester.mount = True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self): \"\"\"Verify that results", "error when --files-from does not exist. 
self.CheckParserError(['--files-from', '/fake/file'], 'is not a file') #", "when no args are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self): \"\"\"Verify that the build", "temporary file path lasting for the duration of a test.\"\"\" return os.path.join(self.tempdir, file_path)", "= ['ui.ChromeLogin'] self._tester.test_timeout = 100 self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port =", "= ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'], expected=False) self.assertCommandContains(['cd %s &&", "for testing. Args: opts: Cmd-line args to cros_test used to build a CrOSTest.", "['testAddResults'] self._tester.guest = True self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ]) def", "self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) # Ensure test ssh keys are authorized with chronos. 
self.assertCommandContains(['cp',", "= ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests", "'/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ]) def testRunDeviceCmd(self): \"\"\"Verify a run device cmd call.\"\"\"", "an autotest call with attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = 'test_results' self._tester._device.private_key =", "lasting for the duration of a test.\"\"\" return os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests", "build directory is set when not specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the", "self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results',", "if it is given as a string. if isinstance(args, str): args = [args]", "'/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from outside the", "'-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test test cases.\"\"\"", "by providing necessary runtime files. self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe,", "instance of cros_test.CrOSTest. 
\"\"\" opts = cros_test.ParseCommandLine(opts if opts else []) opts.enable_kvm =", "the chrome test. test_label: The label of the chrome test. build_dir: The directory", "FileList returns files when files_from is None. files = ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files,", "tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str = ('QEMU emulator version 2.6.0, Copyright (c)", "Ensure target directory is removed at the end of the test. self.assertCommandContains(['rm', '-rf',", "output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify we get a parser error for", "a custom SSH port is supported for tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm =", "testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from outside the chroot.\"\"\" # Checks that", "is removed at the end of the test. self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self):", "not needed.\"\"\" self._tester.flash = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0',", "if there are args, but no command. self.CheckParserError('--some_test some_command', '--remote-cmd or --host-cmd or", "import sys import mock import pytest # pylint: disable=import-error from chromite.lib import constants", "# Parser error when a file has a bad path. self.CheckParserError(['--files', '../some_file'], 'cannot", "Check if new VM is responsive. 
self.assertCommandContains( ['ssh', '-p', '9222', 'root@localhost', '--', 'true'])", "makedirs=True) self._tester.build = True self._tester.deploy = True self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test'", "self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'cd /usr/local/chrome_test && su chronos -c -- '", "'with results-dest-dir') # Parser error if results destination dir is a file. filename", "a list if it is given as a string. if isinstance(args, str): args", "SimpleChrome SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board,", "looks like //crypto:crypto_unittests. # label_root extracts 'crypto' from the test_label in this instance.", "self._tester.build_dir) # Ensure files are being copied over to the device using rsync.", "= True self._tester.Run() # Check if new VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) #", "existing directory. self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not a directory') def testParserErrorResultsSrc(self): \"\"\"Verify parser errors", "'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ]) def testRunDeviceCmd(self): \"\"\"Verify a run device cmd call.\"\"\" self._tester.remote_cmd", "'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests flash command is skipped when not needed.\"\"\" self._tester.flash", "\"\"\"Verify running a remote command when src files are not specified. 
The remote", "self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify running a set", "'-enable-kvm']) # Checks that autotest is running. self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options', '-F /dev/null", "test_label looks like //crypto:crypto_unittests. # label_root extracts 'crypto' from the test_label in this", "that --host-cmd does not invoke ssh since it runs on the host. self.assertCommandContains(['ssh',", "The name of the chrome test. test_label: The label of the chrome test.", "testStartVM(self): \"\"\"Verify that a new VM is started before running tests.\"\"\" self._tester.start_vm =", "'--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome command with additional arguments.\"\"\"", "a chrome test. Args: test_exe: The name of the chrome test. test_label: The", "when a test arg is given.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' +", "'-p'], expected=False) self.assertCommandContains(['cd %s && /usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'], expected=False)", "opts = cros_test.ParseCommandLine(opts if opts else []) opts.enable_kvm = True # We check", "autotest call with attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = 'test_results' self._tester._device.private_key = '.ssh/testing_rsa'", "def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from within the chroot.\"\"\" # Checks", "expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests a simple", "[test_exe] self._tester.chrome_test = True self._tester.build_dir = 
self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add", "def testFileList(self): \"\"\"Verify that FileList returns the correct files.\"\"\" # Ensure FileList returns", "uses 'files_from' and ignores 'files'. file_list = ['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list,", "testExpressionBaseTastTest(self): \"\"\"Verify running a set of tast tests with an expression.\"\"\" self._tester.tast =", "invoke ssh since it runs on the host. self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class", "'-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile',", "is stopped so the test can grab the GPU if needed. self.assertCommandContains(['ssh', '-p',", "command') # Parser error if using chronos without a test command. self.CheckParserError('--as-chronos', 'as-chronos')", "using certain commands.\"\"\" # Parser error if no test command is provided. self.CheckParserError('--remote-cmd',", "dependencies are checked for. 
self.assertCommandContains(['gn', 'desc', build_dir, test_label, 'runtime_deps']) # Ensure UI is", "\"\"\"Creates a temporary file path lasting for the duration of a test.\"\"\" return", "running a single tast test with various arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout =", "arguments.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True self._tester.mount = True", "cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify parser errors for building/deploying", "= True self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ]) def testRunDeviceCmd(self): \"\"\"Verify", "'xbuddy://remote/octopus-full/latest',]) # Specify an xbuddy link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'),", "!flaky && !disabled)' ] self._tester.Run() self.assertCommandContains([ 'tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\"", "set when not specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the build directory from", "arguments. \"\"\" # Ensure chrome is being built. self.assertCommandContains(['autoninja', '-C', build_dir, test_exe]) #", "been called. # TODO(crbug/1065172): Invalid assertion that had previously been mocked. 
# check_inside_chroot_mock.assert_called()", "'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null',", "miscellaneous utility methods\"\"\" def testStartVM(self): \"\"\"Verify that a new VM is started before", "= os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false', '-waituntilready', '-remoterunner=%s' %", "relative path') # Parser error when a file has a bad path. self.CheckParserError(['--files',", "mode=0o700) self._tester.as_chronos = True self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure target directory", "so the test can grab the GPU if needed. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost',", "self._tester.start_vm = True self._tester.Run() # Check that we use the custom port when", "self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', 'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests", "def testFlashSkip(self): \"\"\"Tests flash command is skipped when not needed.\"\"\" self._tester.flash = True", "\"\"\"Unit tests for CrOSTest.\"\"\" from __future__ import print_function import os import sys import", "'((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify", "# pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for setup and creating a temp", "of a test.\"\"\" return os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility methods\"\"\" def", "# Recreate args as a 
list if it is given as a string.", "\"dep:android\") && !flaky && !disabled)' ] self._tester.Run() self.assertCommandContains([ 'tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm',", "% self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'], expected=False) def testHostCmd(self): \"\"\"Verify running a host command.\"\"\"", "'-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test test cases.\"\"\" def SetUpChromeTest(self,", "self._tester._device.device = '172.16.17.32' self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir", "'localhost:9222', 'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test test cases.\"\"\" def SetUpChromeTest(self, test_exe,", "that we use the custom port when talking to the VM. self.assertCommandContains( ['ssh',", "\"\"\"Tests parser test cases.\"\"\" def CheckParserError(self, args, error_msg): \"\"\"Checks that parser error is", "False tester._device.board = 'amd64-generic' tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str = ('QEMU emulator", "'vm_sanity.py' % self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'], expected=False) def testHostCmd(self): \"\"\"Verify running a host", "The label of the chrome test. build_dir: The directory where chrome is built.", "run. 
self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'cd /usr/local/chrome_test && su chronos -c --", "'--mount']) def testFetchResults(self): \"\"\"Verify that results files/directories are copied from the DUT.\"\"\" self._tester.results_src", "def TempFilePath(self, file_path): \"\"\"Creates a temporary file path lasting for the duration of", "call with attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = 'test_results' self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level", "is given as a string. if isinstance(args, str): args = [args] # Putting", "self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) # Ensure files are being copied over", "\"%s\", \"type\": \"console_test_launcher\", } }\"\"\" % (test_exe, test_label), makedirs=True) self._tester.build = True self._tester.deploy", "in guest mode.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.guest = True self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/'", "= True self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' # test_label looks like //crypto:crypto_unittests.", "(test_exe, args)]) def testChromeTestRsync(self): \"\"\"Verify build/deploy and chrome test commands using rsync to", "= '/some/chromium/dir' self._tester.args = ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() # Ensure command is", "from the DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run()", "= True self._tester.public_image = True self._tester._device.board = 'octopus' 
self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0',", "def testParserErrorCommands(self): \"\"\"Verify we get parser errors when using certain commands.\"\"\" # Parser", "def testChromeTestRsync(self): \"\"\"Verify build/deploy and chrome test commands using rsync to copy.\"\"\" test_exe", "the chroot before running test_that. self.assertIn(('cros_sdk -- test_that --board amd64-generic --no-quickmerge' \" --ssh_options", "not an existing directory. self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not a directory') def testParserErrorResultsSrc(self): \"\"\"Verify", "osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": { \"label\": \"%s\", \"type\": \"console_test_launcher\", } }\"\"\" % (test_exe, test_label),", "The remote command should not change the working directory or create a temp", "test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the build directory from the parsed options. build_dir", "self.CheckParserError('--some_test some_command', '--remote-cmd or --host-cmd or --chrome-test') # Parser error when additional args", "extracts 'crypto' from the test_label in this instance. label_root = test_label.split(':')[0].lstrip('/') # A", "configurations necessary for running a chrome test. 
Args: test_exe: The name of the", "&& !disabled)' ] self._tester.Run() self.assertCommandContains([ 'tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" ||", "for filename in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' % filename, self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify that", "utility methods\"\"\" def testStartVM(self): \"\"\"Verify that a new VM is started before running", "= cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir,", "the end of the test. self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run", "= '/tmp/dest_dir_file' osutils.Touch(filename) self.CheckParserError(['--results-src', '/tmp/results', '--results-dest-dir', filename], 'existing file') def testParserErrorCommands(self): \"\"\"Verify we", "# Run vm_sanity. self.assertCommandContains([ 'ssh', '-p', '9222', 'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ]) def testCatapult(self):", "had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] # Capture the run", "opts.enable_kvm = True # We check if /dev/kvm is writeable to use sudo.", "if the cwd is not an absolute path. self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must be", "autotest call.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.Run() # Check VM got launched. 
self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm'])", "self._tester.nostrip = True self._tester.mount = True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self): \"\"\"Verify that", "running an autotest from outside the chroot.\"\"\" # Checks that mock version has", "'/not/a/directory'], 'not a directory') def testParserErrorResultsSrc(self): \"\"\"Verify parser errors for results src/dest directories.\"\"\"", "tests.\"\"\" self._tester.start_vm = True self._tester.Run() # Check if new VM got launched. self.assertCommandContains([self._tester._device.qemu_path,", "a CrOSTest suitable for testing. Args: opts: Cmd-line args to cros_test used to", "(test_exe, test_label), makedirs=True) self._tester.build = True self._tester.deploy = True self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir", "% test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe, test_exe), '../../third_party/chromite'] # Creates the test_exe to", "self._tester.host_cmd = True self._tester.build_dir = '/some/chromium/dir' self._tester.args = ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run()", "os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase):", "an # exception is not raised if it fails. 
self.assertCommandCalled( ['tast', 'run', 'localhost:9222',", "should not change the working directory or create a temp directory on the", "CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests a simple autotest call.\"\"\" self._tester.autotest", "'root@localhost', '--', 'stop ui']) # Ensure a user activity ping is sent to", "+ test_exe test_args = ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args)", "certain commands.\"\"\" # Parser error if no test command is provided. self.CheckParserError('--remote-cmd', 'specify", "Check VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks that autotest is running. self.assertCommandContains([", "path lasting for the duration of a test.\"\"\" return os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase):", "= 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests flash", "self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.test_that_args", "assertion that had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir =", "= test_label.split(':')[0].lstrip('/') # A few files used by the chrome test. runtime_deps =", "are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self): \"\"\"Verify that the build directory is set", "be # found in the LICENSE file. 
\"\"\"Unit tests for CrOSTest.\"\"\" from __future__", "Error message to check for. \"\"\" # Recreate args as a list if", "is specified. self.CheckParserError('--build', '--build-dir') # Parser error if build directory is not an", "'--build-dir', self._tester.build_dir, '--process-timeout', '180', '--device', self._tester._device.device + ':9222', '--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir]) def", "chrome test is run. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'cd /usr/local/chrome_test && su", "and creating a temp file path.\"\"\" def createTester(self, opts=None): \"\"\"Builds a CrOSTest suitable", "like //crypto:crypto_unittests. # label_root extracts 'crypto' from the test_label in this instance. label_root", "'9222', 'root@localhost', '--', 'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args = '", "//crypto:crypto_unittests. # label_root extracts 'crypto' from the test_label in this instance. label_root =", "':9222', '--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome command with additional", "self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self):", "running. self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', 'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self):", "build directory from the parsed options. 
build_dir = cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir,", "'specify test command') # Parser error if using chronos without a test command.", "run device cmd call.\"\"\" self._tester.remote_cmd = True self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos", "src files are not specified. The remote command should not change the working", "@mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from outside the chroot.\"\"\"", "chrome test commands ran properly. Args: test_exe: The name of the chrome test.", "testParserErrorBuild(self): \"\"\"Verify parser errors for building/deploying Chrome.\"\"\" # Parser error if no build", "= 'crypto_unittests' test_label = '//crypto:' + test_exe test_args = ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args)", "['accessibility_Sanity'] # Capture the run command. This is necessary beacuse the mock doesn't", "cros_test.ParseCommandLine(opts if opts else []) opts.enable_kvm = True # We check if /dev/kvm", "or --chrome-test') # Parser error when additional args don't start with --. self.CheckParserError(['--host-cmd',", "[ './%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe, test_exe), '../../third_party/chromite'] # Creates the", "command. This is necessary beacuse the mock doesn't # capture the cros_sdk wrapper.", "\"\"\"Verify that a custom SSH port is supported for tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345'])", "running an autotest from within the chroot.\"\"\" # Checks that mock version has", "running a host command.\"\"\" self._tester.host_cmd = True self._tester.build_dir = '/some/chromium/dir' self._tester.args = ['tast',", "autotest is running. 
self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', 'localhost:9222', 'accessibility_Sanity'])", "when not specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the build directory from the", "class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for setup and creating a temp file path.\"\"\" def", "methods\"\"\" def testStartVM(self): \"\"\"Verify that a new VM is started before running tests.\"\"\"", "\"\"\"Verify that a new VM is started before running tests.\"\"\" self._tester.start_vm = True", "['./bin/vm_sanity.py'] self._tester.Run() # Ensure command runs in the autotest directory. self.assertCommandContains('cd /usr/local/autotest &&", "def SetUpChromeTest(self, test_exe, test_label, test_args=None): \"\"\"Sets configurations necessary for running a chrome test.", "grab the GPU if needed. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'stop ui']) #", "giving a cwd.\"\"\" self._tester.remote_cmd = True self._tester.cwd = '/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run()", "setUp(self): \"\"\"Common set up method for all tests.\"\"\" self._tester = self.createTester() def TempFilePath(self,", "'true']) def testStartVMCustomPort(self): \"\"\"Verify that a custom SSH port is supported for tests.\"\"\"", "self._tester.remote_cmd = True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p', '9222', '/usr/local/autotest/bin/vm_sanity.py']) self.assertCommandContains(['mkdir', '-p'],", "cros_test used to build a CrOSTest. Returns: An instance of cros_test.CrOSTest. \"\"\" opts", "test_args=None): \"\"\"Checks to see that chrome test commands ran properly. 
Args: test_exe: The", "test_label, self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test cases.\"\"\" def CheckParserError(self, args, error_msg):", "= os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd, 'test_results') testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest()", "Ensure that --host-cmd does not invoke ssh since it runs on the host.", "self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy and chrome test commands", "path. self.CheckParserError(['--files', '/etc/lsb-release'], 'should be a relative path') # Parser error when a", "self._tester.build = True self._tester.deploy = True self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' #", "sudo. with mock.patch.object(os, 'access', return_value=True): tester = cros_test.CrOSTest(opts) tester._device.use_sudo = False tester._device.board =", "= [ './%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe, test_exe), '../../third_party/chromite'] # Creates", "cros_test_lib.pytestmark_inside_only assert sys.version_info >= (3, 6), 'This module requires Python 3.6+' # pylint:", "self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests autotest test cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests", "= True self._tester.deploy = True self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' # test_label", "= ' '.join(test_args) if test_args else '' # Ensure the chrome test is", "when a file in --files has an absolute path. 
self.CheckParserError(['--files', '/etc/lsb-release'], 'should be", "self._tester.flash = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy", "the build directory is set when not specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves", "Check if new VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check if new VM", "is started before running tests.\"\"\" self._tester.start_vm = True self._tester.Run() # Check if new", "if no build directory is specified. self.CheckParserError('--build', '--build-dir') # Parser error if build", "Add info about the specified chrome test to the isolate map. osutils.WriteFile(isolate_map, \"\"\"{", "Ensure command runs in the target directory. self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests ' '--test-launcher-print-test-stdio=always')", "necessary runtime files. self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe, test_label, build_dir,", "# Parser error when additional args don't start with --. 
self.CheckParserError(['--host-cmd', 'tast', 'run'],", "\"\"\" opts = cros_test.ParseCommandLine(opts if opts else []) opts.enable_kvm = True # We", "'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222',", "import constants from chromite.lib import cros_test from chromite.lib import cros_test_lib from chromite.lib import", "'--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome command with additional arguments.\"\"\" self._tester.deploy =", "\"\"\"Verify running a single tast test with various arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout", "testFlashSkip(self): \"\"\"Tests flash command is skipped when not needed.\"\"\" self._tester.flash = True self._tester._device.board", "are being copied over to the device using scp. self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir,", "self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an", "'/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def", "of the chrome test. test_label: The label of the chrome test. 
test_args: A", "'existing file') def testParserErrorCommands(self): \"\"\"Verify we get parser errors when using certain commands.\"\"\"", "for dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) # Mocks the output by providing", "cros_test_lib from chromite.lib import osutils from chromite.lib import partial_mock from chromite.scripts import cros_set_lsb_release", "UI is stopped so the test can grab the GPU if needed. self.assertCommandContains(['ssh',", "@mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy and chrome test commands using scp", "\"\"\"Verify we get parser errors with --files.\"\"\" # Parser error when both --files", "some_command', '--remote-cmd or --host-cmd or --chrome-test') # Parser error when additional args don't", "self.assertCommandCalled( ['test_that', '--board', 'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F", "or create a temp directory on the target. \"\"\" self._tester.remote_cmd = True self._tester.args", "build directory is not an existing directory. 
self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not a directory')", "= True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info about", "self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests ' '--test-launcher-print-test-stdio=always') # Ensure target directory is removed at", "['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure that --host-cmd does", "assert sys.version_info >= (3, 6), 'This module requires Python 3.6+' # pylint: disable=protected-access", "from chromite.lib import cros_test_lib from chromite.lib import osutils from chromite.lib import partial_mock from", "cmd call.\"\"\" self._tester.remote_cmd = True self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos = True", "running a set of tast tests with an expression.\"\"\" self._tester.tast = [ '((\"dep:chrome\"", "'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify build/deploy and chrome test commands when a test", "'/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests", "with an expression.\"\"\" self._tester.tast = [ '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)'", "SetUpChromeTest(self, test_exe, test_label, test_args=None): \"\"\"Sets configurations necessary for running a chrome test. 
Args:", "test commands when a test arg is given.\"\"\" test_exe = 'crypto_unittests' test_label =", "import cros_test_lib from chromite.lib import osutils from chromite.lib import partial_mock from chromite.scripts import", "\"\"\"Verify catapult test command.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system',", "tast test cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify running a single tast test.\"\"\" self._tester.tast =", "tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ]) class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests", "when talking to the VM. self.assertCommandContains( ['ssh', '-p', '12345', 'root@localhost', '--', 'true']) def", "# Check VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks that autotest is running.", "error if there are args, but no command. self.CheckParserError('--some_test some_command', '--remote-cmd or --host-cmd", "args are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self): \"\"\"Verify that the build directory is", "specified chrome test to the isolate map. osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": { \"label\": \"%s\",", "returns files when files_from does not exist. files_from = self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from))", "# TODO(crbug/1065172): Invalid assertion that had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest =", "testParserErrorChromeTest(self): \"\"\"Verify we get a parser error for --chrome-test when no args are", "of the chrome test. test_label: The label of the chrome test. 
build_dir: The", "# Parser error if no test command is provided. self.CheckParserError('--remote-cmd', 'specify test command')", "= cros_test_lib.pytestmark_inside_only assert sys.version_info >= (3, 6), 'This module requires Python 3.6+' #", "# pylint: disable=import-error from chromite.lib import constants from chromite.lib import cros_test from chromite.lib", "the device using rsync. self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self,", "def testParserErrorChromeTest(self): \"\"\"Verify we get a parser error for --chrome-test when no args", "|| \"dep:android\") && !flaky && !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running", "True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir, '--process-timeout', '180', '--device', self._tester._device.device", "remote command should not change the working directory or create a temp directory", "&& !flaky && !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running a single", "a test arg is given.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe", "of the chrome test. build_dir: The directory where chrome is built. test_args: Chrome", "Ensure target directory is created on the DUT. self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) # Ensure", "the run command. This is necessary beacuse the mock doesn't # capture the", "build directory is specified. 
self.CheckParserError('--build', '--build-dir') # Parser error if build directory is", "= '//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) # Ensure files", "mock doesn't # capture the cros_sdk wrapper. self._tester._RunAutotest() # Check that we enter", "built. self.assertCommandContains(['autoninja', '-C', build_dir, test_exe]) # Ensure that the runtime dependencies are checked", "'2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester def setUp(self): \"\"\"Common set up method for", "test cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify running a single tast test.\"\"\" self._tester.tast = ['ui.ChromeLogin']", "the SimpleChrome SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference(", "pytest # pylint: disable=import-error from chromite.lib import constants from chromite.lib import cros_test from", "that a custom SSH port is supported for tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm", "self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device", "from outside the chroot.\"\"\" # Checks that mock version has been called. 
#", "'--ssh_options', '-F /dev/null -i /dev/null', 'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests an autotest call", "'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe, test_exe), '../../third_party/chromite'] # Creates the test_exe to be an", "= True self._tester.build_dir = '/some/chromium/dir' self._tester.args = ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() #", "are not specified. The remote command should not change the working directory or", "--host-cmd does not invoke ssh since it runs on the host. self.assertCommandContains(['ssh', 'tast'],", "'//crypto:' + test_exe test_args = ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir,", "name of the chrome test. test_label: The label of the chrome test. test_args:", "rsync. self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy", "self._tester.test_timeout = 100 self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device", "The directory where chrome is built. test_args: Chrome test arguments. \"\"\" # Ensure", "dir is given. self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src') # Parser error if no results", "path.\"\"\" def createTester(self, opts=None): \"\"\"Builds a CrOSTest suitable for testing. Args: opts: Cmd-line", "if no results source is given. 
self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir') # Parser error", "CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test test cases.\"\"\" def SetUpChromeTest(self, test_exe, test_label, test_args=None): \"\"\"Sets configurations", "'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ])", "attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = 'test_results' self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level = 'debug'", "in the target directory. self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests ' '--test-launcher-print-test-stdio=always') # Ensure target", "to the VM. self.assertCommandContains( ['ssh', '-p', '12345', 'root@localhost', '--', 'true']) def testFlash(self): \"\"\"Tests", "is necessary beacuse the mock doesn't # capture the cros_sdk wrapper. self._tester._RunAutotest() #", "% os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa',", "self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info about the specified chrome test to the isolate map.", "get parser errors with --files.\"\"\" # Parser error when both --files and --files-from", "single tast test with various arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout = 100 self._tester._device.log_level", "errors when using certain commands.\"\"\" # Parser error if no test command is", "about the specified chrome test to the isolate map. 
osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": {", "/usr/local/cros_test && crypto_unittests ' '--test-launcher-print-test-stdio=always') # Ensure target directory is removed at the", "test_args else '' # Ensure the chrome test is run. self.assertCommandContains(['ssh', '-p', '9222',", "from __future__ import print_function import os import sys import mock import pytest #", "'file_list', '--files-from', 'file'], '--files and --files-from') # Parser error when --files-from does not", "dir, and ensure an # exception is not raised if it fails. self.assertCommandCalled(", "# Parser error when both --files and --files-from are specified. self.CheckParserError(['--files', 'file_list', '--files-from',", "file path lasting for the duration of a test.\"\"\" return os.path.join(self.tempdir, file_path) class", "= [args] # Putting outcap.OutputCapturer() before assertRaises(SystemExit) # swallows SystemExit exception check. with", "if new VM is responsive. self.assertCommandContains( ['ssh', '-p', '9222', 'root@localhost', '--', 'true']) def", "mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] # Capture the run command. This is", "test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) # Ensure files are being copied over to", "Authors. All rights reserved. # Use of this source code is governed by", "= '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run', '-build=false', '-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results', '172.16.17.32',", "test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test cases.\"\"\" def", "TODO(crbug/1065172): Invalid assertion that had previously been mocked. 
# check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity']", "isinstance(args, str): args = [args] # Putting outcap.OutputCapturer() before assertRaises(SystemExit) # swallows SystemExit", "dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure that --host-cmd does not invoke ssh since it", "before assertRaises(SystemExit) # swallows SystemExit exception check. with self.assertRaises(SystemExit): with outcap.OutputCapturer() as output:", "The Chromium OS Authors. All rights reserved. # Use of this source code", "Use of this source code is governed by a BSD-style license that can", "<NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester def setUp(self): \"\"\"Common set up method for all", "enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from within the", "self._tester.results_dir = '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run', '-build=false', '-waituntilready', '-timeout=100', '-resultsdir', '/tmp/results',", "files. 
self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe, test_label, build_dir, test_args=None): \"\"\"Checks", "Ensure command is run with an env var for the build dir, and", "= self.createTester() def TempFilePath(self, file_path): \"\"\"Creates a temporary file path lasting for the", "self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe, test_label, build_dir, test_args=None): \"\"\"Checks to", "self._tester.tast = ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self):", "command.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir, '--process-timeout',", "self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'),", "\"\"\"Verify that the build directory is set when not specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests')", "test_label, self._tester.build_dir) # Ensure files are being copied over to the device using", "custom SSH port is supported for tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True", "Ensure test ssh keys are authorized with chronos. self.assertCommandContains(['cp', '-r', '/root/.ssh/', '/home/chronos/user/']) #", "it fails. 
self.assertCommandCalled( ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure", "version has been called. # TODO(crbug/1065172): Invalid assertion that had previously been mocked.", "refers to a parent path. self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot start with ..') #", "can be # found in the LICENSE file. \"\"\"Unit tests for CrOSTest.\"\"\" from", "test_label = '//crypto:' + test_exe test_args = ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe,", "'chronos:', '/usr/local/cros_test']) # Ensure command runs in the target directory. self.assertCommandContains('cd /usr/local/cros_test &&", "= ['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous", "}\"\"\" % (test_exe, test_label), makedirs=True) self._tester.build = True self._tester.deploy = True self._tester.chrome_test_target =", "--chrome-test when no args are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self): \"\"\"Verify that the", "from chromite.utils import outcap pytestmark = cros_test_lib.pytestmark_inside_only assert sys.version_info >= (3, 6), 'This", "= self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir) self._tester.Run() for filename in self._tester.results_src: self.assertCommandContains(['scp', 'root@localhost:%s' % filename, self._tester.results_dest_dir])", "# Parser error if no results source is given. 
self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir')", "% self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify build/deploy and chrome test commands when", "'localhost:9222', '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock):", "'--remote-cmd or --host-cmd or --chrome-test') # Parser error when additional args don't start", "device cmd call.\"\"\" self._tester.remote_cmd = True self._tester.files = [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos =", "'runtime_deps']) # Ensure UI is stopped so the test can grab the GPU", "'9222', 'root@localhost', '--', 'stop ui']) # Ensure a user activity ping is sent", "self._tester.autotest = ['accessibility_Sanity'] self._tester.Run() # Check VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks", "% (test_exe, args)]) def testChromeTestRsync(self): \"\"\"Verify build/deploy and chrome test commands using rsync", "a new VM is started before running tests.\"\"\" self._tester.start_vm = True self._tester.Run() #", "absolute. 
self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') # Parser error if no results destination dir is", "@mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running a single tast test with various arguments.\"\"\"", "tests.\"\"\" self._tester = self.createTester() def TempFilePath(self, file_path): \"\"\"Creates a temporary file path lasting", "self.CheckParserError(['--files', '/etc/lsb-release'], 'should be a relative path') # Parser error when a file", "\"\"\"Tests flash command is skipped when not needed.\"\"\" self._tester.flash = True self._tester._device.board =", "+ test_args if test_args else [test_exe] self._tester.chrome_test = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir)", "self._tester.Run() # Check if new VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check if", "errors for building/deploying Chrome.\"\"\" # Parser error if no build directory is specified.", "test can grab the GPU if needed. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'stop", "self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify running a", "= [ '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ] self._tester.Run() self.assertCommandContains([ 'tast',", "./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a remote command when src files are not", "not a file') # Parser error when a file in --files has an", "build dir, and ensure an # exception is not raised if it fails.", "label of the chrome test. test_args: A list of arguments of the particular", "files when files_from does not exist. 
files_from = self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from)) #", "a file in --files has an absolute path. self.CheckParserError(['--files', '/etc/lsb-release'], 'should be a", "self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false', '-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir,", "= True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self): \"\"\"Verify that results files/directories are copied", "'run', '-build=false', '-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' % os.path.join(tast_cache_dir, 'tast-remote-tests-cros/usr', 'libexec/tast/bundles/remote'), '-remotedatadir=%s'", "self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info about the specified chrome test", "True self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' # test_label looks like //crypto:crypto_unittests. #", "test_args = ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase):", "Parser error when a file in --files has an absolute path. 
self.CheckParserError(['--files', '/etc/lsb-release'],", "'libexec/tast/bundles/remote'), '-remotedatadir=%s' % os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin'", "'--', 'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args = ' '.join(test_args) if", "The label of the chrome test. test_args: A list of arguments of the", "'180', '--device', self._tester._device.device + ':9222', '--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests deploy", "autotest directory. self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a remote command", "osutils.Touch(tester._device.image_path) version_str = ('QEMU emulator version 2.6.0, Copyright (c) ' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'),", "osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info about the specified chrome test to", "'ui.ChromeLogin']) def testExpressionBaseTastTest(self): \"\"\"Verify running a set of tast tests with an expression.\"\"\"", "parser error for --chrome-test when no args are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self):", "tast tests from the SimpleChrome SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key = '/tmp/.ssh/testing_rsa' tast_cache_dir", "# test_label looks like //crypto:crypto_unittests. 
# label_root extracts 'crypto' from the test_label in", "self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self,", "that had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] # Capture the", "found in the LICENSE file. \"\"\"Unit tests for CrOSTest.\"\"\" from __future__ import print_function", "the cwd.\"\"\" # Parser error if the cwd refers to a parent path.", "files_from is None. files = ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None)) # Ensure FileList", "testBasic(self, isrunning_mock): \"\"\"Tests basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() # Run vm_sanity. self.assertCommandContains([ 'ssh', '-p',", "testing. Args: opts: Cmd-line args to cros_test used to build a CrOSTest. Returns:", "\"\"\"Tests chrome test test cases.\"\"\" def SetUpChromeTest(self, test_exe, test_label, test_args=None): \"\"\"Sets configurations necessary", "this source code is governed by a BSD-style license that can be #", "command when src files are not specified. 
The remote command should not change", "% self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy and chrome test", "def testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome command with additional arguments.\"\"\" self._tester.deploy = True self._tester.build_dir", "import print_function import os import sys import mock import pytest # pylint: disable=import-error", "extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure that --host-cmd does not invoke ssh since it runs", "None self._tester._device.device = '172.16.17.32' self._tester.test_that_args = ['--test_that-args', '--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT))", "cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'],", "test_label: The label of the chrome test. test_args: A list of arguments of", "_check_inside_chroot_mock): \"\"\"Tests running an autotest from outside the chroot.\"\"\" # Checks that mock", "expected=False) def testHostCmd(self): \"\"\"Verify running a host command.\"\"\" self._tester.host_cmd = True self._tester.build_dir =", "start with ..') # Parser error if the cwd is not an absolute", "# Parser error when a non-existent file is passed to --files. self.CheckParserError(['--files', 'fake/file'],", "is built. test_args: Chrome test arguments. 
\"\"\" # Ensure chrome is being built.", "'--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', 'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests an autotest", "['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ]) def testCatapultAsGuest(self): \"\"\"Verify that", "args)]) def testChromeTestRsync(self): \"\"\"Verify build/deploy and chrome test commands using rsync to copy.\"\"\"", "Python 3.6+' # pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for setup and creating", "Ensure the chrome test is run. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'cd /usr/local/chrome_test", "exist. files_from = self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from)) # Ensure FileList uses 'files_from' and", "been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa'", "in --files has an absolute path. self.CheckParserError(['--files', '/etc/lsb-release'], 'should be a relative path')", "error for --chrome-test when no args are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def testParserSetsBuildDir(self): \"\"\"Verify", "['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa'])", "outside the chroot.\"\"\" # Checks that mock version has been called. # TODO(crbug/1065172):", "file has a bad path. 
self.CheckParserError(['--files', '../some_file'], 'cannot start with ..') # Parser", "# label_root extracts 'crypto' from the test_label in this instance. label_root = test_label.split(':')[0].lstrip('/')", "self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') # Parser error if no results destination dir is given.", "if it fails. self.assertCommandCalled( ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) #", "label_root extracts 'crypto' from the test_label in this instance. label_root = test_label.split(':')[0].lstrip('/') #", "self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' # test_label looks like //crypto:crypto_unittests. # label_root extracts 'crypto' from", "-- ' '\"out_amd64-generic/Release/%s %s\"' % (test_exe, args)]) def testChromeTestRsync(self): \"\"\"Verify build/deploy and chrome", "= self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info about the specified chrome test to the isolate", "are args, but no command. self.CheckParserError('--some_test some_command', '--remote-cmd or --host-cmd or --chrome-test') #", "testParserSetsBuildDir(self): \"\"\"Verify that the build directory is set when not specified.\"\"\" test_dir =", "'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def testFlashSkip(self): \"\"\"Tests flash command is skipped when not", "the test_label in this instance. label_root = test_label.split(':')[0].lstrip('/') # A few files used", "start with --. self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must start with') def testParserErrorCWD(self): \"\"\"Verify we", "# exception is not raised if it fails. 
self.assertCommandCalled( ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'],", "tast test.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin'])", "'root@localhost', '--', '/usr/local/autotest/bin/vm_sanity.py' ]) def testCatapult(self): \"\"\"Verify catapult test command.\"\"\" self._tester.catapult_tests = ['testAddResults']", "a parent path. self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot start with ..') # Parser error", "that we use the correct browser in guest mode.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.guest", "\"\"\"Builds a CrOSTest suitable for testing. Args: opts: Cmd-line args to cros_test used", "chrome test commands when a test arg is given.\"\"\" test_exe = 'crypto_unittests' test_label", "the target directory. self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests ' '--test-launcher-print-test-stdio=always') # Ensure target directory", "# Add info about the specified chrome test to the isolate map. osutils.WriteFile(isolate_map,", "specified.\"\"\" test_dir = self.TempFilePath('out_amd64-generic/Release/crypto_unittests') # Retrieves the build directory from the parsed options.", "'files_from' and ignores 'files'. file_list = ['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files,", "'--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', '172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def", "this instance. label_root = test_label.split(':')[0].lstrip('/') # A few files used by the chrome", "files_from)) # Ensure FileList uses 'files_from' and ignores 'files'. file_list = ['/tmp/file1', '/tmp/file2',", "'files'. 
file_list = ['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase):", "from the test_label in this instance. label_root = test_label.split(':')[0].lstrip('/') # A few files", "is None. files = ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None)) # Ensure FileList returns", "[]) opts.enable_kvm = True # We check if /dev/kvm is writeable to use", "set up method for all tests.\"\"\" self._tester = self.createTester() def TempFilePath(self, file_path): \"\"\"Creates", "'crypto_unittests' test_label = '//crypto:' + test_exe test_args = ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run()", "label_root = test_label.split(':')[0].lstrip('/') # A few files used by the chrome test. runtime_deps", "to the device using scp. self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self):", "error if no results destination dir is given. self.CheckParserError(['--results-src', '/tmp/results'], 'with results-src') #", "is responsive. self.assertCommandContains( ['ssh', '-p', '9222', 'root@localhost', '--', 'true']) def testStartVMCustomPort(self): \"\"\"Verify that", "on the target. \"\"\" self._tester.remote_cmd = True self._tester.args = ['/usr/local/autotest/bin/vm_sanity.py'] self._tester.Run() self.assertCommandContains(['ssh', '-p',", "osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) # Mocks the output by providing necessary runtime files. 
self.rc.AddCmdResult(", "= True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run() self.assertCommandContains([", "files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self, isrunning_mock): \"\"\"Tests", "command. self.CheckParserError('--some_test some_command', '--remote-cmd or --host-cmd or --chrome-test') # Parser error when additional", "for tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True self._tester.Run() # Check that we", "Putting outcap.OutputCapturer() before assertRaises(SystemExit) # swallows SystemExit exception check. with self.assertRaises(SystemExit): with outcap.OutputCapturer()", "def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from outside the chroot.\"\"\" # Checks", "= 'test_results' self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port =", "= ['--test_that-args', '--allow-chrome-crashes'] cwd = os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd, 'test_results') testing_rsa_dir", "= ['accessibility_Sanity'] self._tester.results_dir = 'test_results' self._tester._device.private_key = '.ssh/testing_rsa' self._tester._device.log_level = 'debug' self._tester._device.should_start_vm =", "started before running tests.\"\"\" self._tester.start_vm = True self._tester.Run() # Check if new VM", "= 'amd64-generic' tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str = ('QEMU emulator version 2.6.0,", "does not exist. 
files_from = self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from)) # Ensure FileList uses", "self._tester.build_dir) # Ensure files are being copied over to the device using scp.", "\"\"\"{ \"%s\": { \"label\": \"%s\", \"type\": \"console_test_launcher\", } }\"\"\" % (test_exe, test_label), makedirs=True)", "def testParserErrorBuild(self): \"\"\"Verify parser errors for building/deploying Chrome.\"\"\" # Parser error if no", "specifying the board gets the latest canary. self._tester.flash = True self._tester.public_image = True", "..') # Parser error if the cwd is not an absolute path. self.CheckParserError(['--cwd',", "file in --files has an absolute path. self.CheckParserError(['--files', '/etc/lsb-release'], 'should be a relative", "'((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ] self._tester.Run() self.assertCommandContains([ 'tast', 'run', '-build=false',", "test_args: A list of arguments of the particular chrome test. \"\"\" self._tester.args =", "'testAddResults' ]) def testRunDeviceCmd(self): \"\"\"Verify a run device cmd call.\"\"\" self._tester.remote_cmd = True", "an absolute path. self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must be an absolute path') def testParserErrorFiles(self):", "user activity ping is sent to the device. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--',", "'--files and --files-from') # Parser error when --files-from does not exist. self.CheckParserError(['--files-from', '/fake/file'],", "self.assertCommandContains(['autoninja', '-C', build_dir, test_exe]) # Ensure that the runtime dependencies are checked for.", "Copyright 2019 The Chromium OS Authors. All rights reserved. # Use of this", "raised if it fails. 
self.assertCommandCalled( ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'})", "test command.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ])", "dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from within", "'--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify parser errors for building/deploying Chrome.\"\"\" #", "not exist. files_from = self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from)) # Ensure FileList uses 'files_from'", "'amd64-generic', '--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome command with additional arguments.\"\"\" self._tester.deploy", "OS Authors. All rights reserved. # Use of this source code is governed", "[ '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ] self._tester.Run() self.assertCommandContains([ 'tast', 'run',", "self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from)) # Ensure FileList uses 'files_from' and ignores 'files'. file_list", "chromite.lib import osutils from chromite.lib import partial_mock from chromite.scripts import cros_set_lsb_release from chromite.utils", "# Creates the test_exe to be an executable. osutils.Touch(os.path.join(self._tester.build_dir, runtime_deps[0]), mode=0o700) for dep", "port when talking to the VM. 
self.assertCommandContains( ['ssh', '-p', '12345', 'root@localhost', '--', 'true'])", "outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify we get a parser", "test with various arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout = 100 self._tester._device.log_level = 'debug'", "self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify that FileList returns the correct files.\"\"\" # Ensure FileList", "self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self, rsync_mock): \"\"\"Verify build/deploy and", "self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not a directory') def testParserErrorResultsSrc(self): \"\"\"Verify parser errors for results", "def testDeployChrome(self): \"\"\"Tests basic deploy chrome command.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release')", "but no command. self.CheckParserError('--some_test some_command', '--remote-cmd or --host-cmd or --chrome-test') # Parser error", "with ..') # Parser error if the cwd is not an absolute path.", "test_label, 'runtime_deps']) # Ensure UI is stopped so the test can grab the", "'-resultsdir', '/tmp/results', '172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify running tast tests from the SimpleChrome", "self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify an xbuddy link. 
self._tester.xbuddy", "'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR,", "testDeployChromeWithArgs(self): \"\"\"Tests deploy chrome command with additional arguments.\"\"\" self._tester.deploy = True self._tester.build_dir =", "assertRaises(SystemExit) # swallows SystemExit exception check. with self.assertRaises(SystemExit): with outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args)", "self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',])", "--files-from are specified. self.CheckParserError(['--files', 'file_list', '--files-from', 'file'], '--files and --files-from') # Parser error", "self._tester.Run() self.assertCommandContains([ 'tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" || \"dep:android\") && !flaky", "directory') def testParserErrorResultsSrc(self): \"\"\"Verify parser errors for results src/dest directories.\"\"\" # Parser error", "def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running a single tast test with various arguments.\"\"\" self._tester.tast", "when files_from does not exist. 
files_from = self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files, files_from)) # Ensure", "'/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False) def testOutsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from", "test_label, build_dir, test_args=None): \"\"\"Checks to see that chrome test commands ran properly. Args:", "had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key", "= 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def testDeployChrome(self): \"\"\"Tests", "from chromite.scripts import cros_set_lsb_release from chromite.utils import outcap pytestmark = cros_test_lib.pytestmark_inside_only assert sys.version_info", "dep in runtime_deps[1:]: osutils.Touch(os.path.join(self._tester.build_dir, dep), makedirs=True) # Mocks the output by providing necessary", "# Parser error if --results-src is not absolute. self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') # Parser", "an absolute path. self.CheckParserError(['--files', '/etc/lsb-release'], 'should be a relative path') # Parser error", "the runtime dependencies are checked for. self.assertCommandContains(['gn', 'desc', build_dir, test_label, 'runtime_deps']) # Ensure", "'./%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe, test_exe), '../../third_party/chromite'] # Creates the test_exe", "args, error_msg): \"\"\"Checks that parser error is raised. 
Args: args: List of commandline", "CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify running a single tast test.\"\"\"", "test is run. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'cd /usr/local/chrome_test && su chronos", "cros_sdk wrapper. self._tester._RunAutotest() # Check that we enter the chroot before running test_that.", "'--board', 'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F /dev/null -i", "os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify parser errors for building/deploying Chrome.\"\"\" # Parser error if", "files are not specified. The remote command should not change the working directory", "def CheckChromeTestCommands(self, test_exe, test_label, build_dir, test_args=None): \"\"\"Checks to see that chrome test commands", "--results-src is not absolute. self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') # Parser error if no results", "dep), makedirs=True) # Mocks the output by providing necessary runtime files. 
self.rc.AddCmdResult( partial_mock.InOrder(['gn',", "cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify we get a parser error for --chrome-test", "tast_cache_dir = cros_test_lib.FakeSDKCache( self._tester.cache_dir).CreateCacheReference( self._tester._device.board, 'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([", "= self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True self._tester.Run() # Check that we use the custom", "'../new_cwd'], 'cwd cannot start with ..') # Parser error if the cwd is", "self._tester.remote_cmd = True self._tester.cwd = '/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run() # Ensure command", "def createTester(self, opts=None): \"\"\"Builds a CrOSTest suitable for testing. Args: opts: Cmd-line args", "-- test_that --board amd64-generic --no-quickmerge' \" --ssh_options '-F /dev/null -i /dev/null' localhost:9222\" '", "error if --results-src is not absolute. self.CheckParserError(['--results-src', 'tmp/results'], 'absolute') # Parser error if", "test_args if test_args else [test_exe] self._tester.chrome_test = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map", "the device using scp. self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify", "self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify running a single", "\"\"\"Verify parser errors for results src/dest directories.\"\"\" # Parser error if --results-src is", "has ownership of the directory. 
self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test']) # Ensure command runs", "flash command.\"\"\" # Verify that specifying the board gets the latest canary. self._tester.flash", "self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) # Ensure files are being copied over to the", "= [self.TempFilePath('crypto_unittests')] osutils.Touch(self._tester.files[0], mode=0o700) self._tester.as_chronos = True self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() #", "/dev/kvm is writeable to use sudo. with mock.patch.object(os, 'access', return_value=True): tester = cros_test.CrOSTest(opts)", "Ensure UI is stopped so the test can grab the GPU if needed.", "[args] # Putting outcap.OutputCapturer() before assertRaises(SystemExit) # swallows SystemExit exception check. with self.assertRaises(SystemExit):", "activity ping is sent to the device. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'dbus-send',", "% (label_root, test_exe, test_exe), '../../third_party/chromite'] # Creates the test_exe to be an executable.", "True self._tester.Run() # Check that we use the custom port when talking to", "True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info about the", "ping is sent to the device. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'dbus-send', '--system',", "LICENSE file. 
\"\"\"Unit tests for CrOSTest.\"\"\" from __future__ import print_function import os import", "'not a directory') def testParserErrorResultsSrc(self): \"\"\"Verify parser errors for results src/dest directories.\"\"\" #", "'--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args = ' '.join(test_args) if test_args else", "Ensure chronos has ownership of the directory. self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test']) # Ensure", "self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir = '/usr/local/chrome_test' # test_label looks like //crypto:crypto_unittests. # label_root", "self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ]) def testCatapultAsGuest(self): \"\"\"Verify that we", "working directory or create a temp directory on the target. \"\"\" self._tester.remote_cmd =", "self._tester.args = [test_exe] + test_args if test_args else [test_exe] self._tester.chrome_test = True self._tester.build_dir", "cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify an", "test arguments. \"\"\" # Ensure chrome is being built. 
self.assertCommandContains(['autoninja', '-C', build_dir, test_exe])", "that FileList returns the correct files.\"\"\" # Ensure FileList returns files when files_from", "3.6+' # pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base class for setup and creating a", "} self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains( [os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'localhost', 'xbuddy://remote/octopus/R82-12901.0.0'], expected=False) def", "CheckChromeTestCommands(self, test_exe, test_label, build_dir, test_args=None): \"\"\"Checks to see that chrome test commands ran", "file. \"\"\"Unit tests for CrOSTest.\"\"\" from __future__ import print_function import os import sys", "True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0'", "self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.results_dir", "exist. self.CheckParserError(['--files-from', '/fake/file'], 'is not a file') # Parser error when a file", "creating a temp file path.\"\"\" def createTester(self, opts=None): \"\"\"Builds a CrOSTest suitable for", "source is given. self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir') # Parser error if results destination", "check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] # Capture the run command. This is necessary beacuse", "[test_exe] + test_args if test_args else [test_exe] self._tester.chrome_test = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release')", "info about the specified chrome test to the isolate map. osutils.WriteFile(isolate_map, \"\"\"{ \"%s\":", "test. 
Args: test_exe: The name of the chrome test. test_label: The label of", "(c) ' '2003-2008 <NAME>') self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester def setUp(self): \"\"\"Common set up", "Ensure chrome is being built. self.assertCommandContains(['autoninja', '-C', build_dir, test_exe]) # Ensure that the", "None self._tester._device.device = '172.16.17.32' self._tester.results_dir = '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run', '-build=false',", "var for the build dir, and ensure an # exception is not raised", "/dev/null -i /dev/null' localhost:9222\" ' accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test cases.\"\"\"", "path. self.CheckParserError(['--files', '../some_file'], 'cannot start with ..') # Parser error when a non-existent", "makedirs=True) # Mocks the output by providing necessary runtime files. self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc',", "'root@localhost', '--', 'true']) def testFlash(self): \"\"\"Tests flash command.\"\"\" # Verify that specifying the", "# Ensure files are being copied over to the device using rsync. self.assertCommandContains(['rsync',", "'-F /dev/null -i /dev/null', 'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests an autotest call with", "return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from within the chroot.\"\"\" #", "Invalid assertion that had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] #", "necessary for running a chrome test. 
Args: test_exe: The name of the chrome", "self._tester.cwd = '/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run() # Ensure command runs in the", "test_label), makedirs=True) self._tester.build = True self._tester.deploy = True self._tester.chrome_test_target = test_exe self._tester.chrome_test_deploy_target_dir =", "test. build_dir: The directory where chrome is built. test_args: Chrome test arguments. \"\"\"", "test_label.split(':')[0].lstrip('/') # A few files used by the chrome test. runtime_deps = [", "cases.\"\"\" def SetUpChromeTest(self, test_exe, test_label, test_args=None): \"\"\"Sets configurations necessary for running a chrome", "directory where chrome is built. test_args: Chrome test arguments. \"\"\" # Ensure chrome", "class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test cases.\"\"\" def CheckParserError(self, args, error_msg): \"\"\"Checks that parser", "the duration of a test.\"\"\" return os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility", "testHostCmd(self): \"\"\"Verify running a host command.\"\"\" self._tester.host_cmd = True self._tester.build_dir = '/some/chromium/dir' self._tester.args", "self._tester._device.board, 'chromeos-base') tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false',", "testFetchResults(self): \"\"\"Verify that results files/directories are copied from the DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results',", "self._tester.as_chronos = True self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure target directory is", "is run with an env var for the build dir, and ensure an", "testDeployChrome(self): \"\"\"Tests basic deploy chrome command.\"\"\" self._tester.deploy = True self._tester.build_dir = 
self.TempFilePath('out_amd64-generic/Release') self._tester.Run()", "testFlash(self): \"\"\"Tests flash command.\"\"\" # Verify that specifying the board gets the latest", "% os.path.join( tast_cache_dir, 'tast-remote-tests-cros/usr', 'share/tast/data'), '-ephemeraldevserver=true', '-keyfile', '/tmp/.ssh/testing_rsa', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin' ]) class", "self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot start with ..') # Parser error if the cwd", "self._tester.flash = True self._tester.public_image = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION:", "not raised if it fails. self.assertCommandCalled( ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'], check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR':", "using scp. self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test']) rsync_mock.assert_called() def testChromeTestExeArg(self): \"\"\"Verify build/deploy and", "guest mode.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.guest = True self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests',", "@mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self, isrunning_mock): \"\"\"Tests basic functionality.\"\"\" self._tester.Run() isrunning_mock.assert_called() # Run vm_sanity.", "/dev/null' localhost:9222\" ' accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast test cases.\"\"\" def testSingleBaseTastTest(self):", "CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests miscellaneous test cases.\"\"\" @mock.patch('chromite.lib.vm.VM.IsRunning', return_value=True) def testBasic(self, isrunning_mock): \"\"\"Tests basic functionality.\"\"\"", "given.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe test_args = 
['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe,", "a file') # Parser error when a file in --files has an absolute", "the output by providing necessary runtime files. self.rc.AddCmdResult( partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps)) def", "test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test cases.\"\"\"", "command is provided. self.CheckParserError('--remote-cmd', 'specify test command') # Parser error if using chronos", "when additional args don't start with --. self.CheckParserError(['--host-cmd', 'tast', 'run'], 'must start with')", "{ cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify", "a test.\"\"\" return os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility methods\"\"\" def testStartVM(self):", "at the end of the test. self.assertCommandContains(['rm', '-rf', '/usr/local/cros_test']) def testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a", "'--ssh_private_key', testing_rsa_dir, '--debug', '--allow-chrome-crashes', '--no-quickmerge', '--ssh_options', '-F /dev/null -i /dev/null', '172.16.17.32', 'accessibility_Sanity'], dryrun=False,", "# A few files used by the chrome test. 
runtime_deps = [ './%s'", "%s\"' % (test_exe, args)]) def testChromeTestRsync(self): \"\"\"Verify build/deploy and chrome test commands using", "copied from the DUT.\"\"\" self._tester.results_src = ['/tmp/results/cmd_results', '/tmp/results/filename.txt', '/tmp/results/test_results'] self._tester.results_dest_dir = self.TempFilePath('results_dir') osutils.SafeMakedirs(self._tester.results_dest_dir)", "'--build-dir', '/not/a/directory'], 'not a directory') def testParserErrorResultsSrc(self): \"\"\"Verify parser errors for results src/dest", "= ['accessibility_Sanity'] self._tester.Run() # Check VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks that", "def setUp(self): \"\"\"Common set up method for all tests.\"\"\" self._tester = self.createTester() def", "up method for all tests.\"\"\" self._tester = self.createTester() def TempFilePath(self, file_path): \"\"\"Creates a", "is skipped when not needed.\"\"\" self._tester.flash = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release =", "'/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run() # Ensure command runs in the autotest directory.", "command runs in the target directory. self.assertCommandContains('cd /usr/local/cros_test && crypto_unittests ' '--test-launcher-print-test-stdio=always') #", "mock import pytest # pylint: disable=import-error from chromite.lib import constants from chromite.lib import", "chrome test. runtime_deps = [ './%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe, test_exe),", "False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.results_dir = '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast',", "'as-chronos') # Parser error if there are args, but no command. 
self.CheckParserError('--some_test some_command',", "crypto_unittests ' '--test-launcher-print-test-stdio=always') # Ensure target directory is removed at the end of", "self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot',", "error is raised. Args: args: List of commandline arguments. error_msg: Error message to", "import osutils from chromite.lib import partial_mock from chromite.scripts import cros_set_lsb_release from chromite.utils import", "'tmp/results'], 'absolute') # Parser error if no results destination dir is given. self.CheckParserError(['--results-src',", "'-extrauseflags=tast_vm', 'localhost:9222', '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self,", "the chrome test is run. 
self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'cd /usr/local/chrome_test &&", "file_path): \"\"\"Creates a temporary file path lasting for the duration of a test.\"\"\"", "'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ]) def testCatapultAsGuest(self): \"\"\"Verify that we use the correct browser", "= None self._tester._device.device = '172.16.17.32' self._tester.results_dir = '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose', 'run',", "self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify we get a parser error for --chrome-test when", "test cases.\"\"\" def SetUpChromeTest(self, test_exe, test_label, test_args=None): \"\"\"Sets configurations necessary for running a", "partial_mock.InOrder(['gn', 'desc', test_label]), output='\\n'.join(runtime_deps)) def CheckChromeTestCommands(self, test_exe, test_label, build_dir, test_args=None): \"\"\"Checks to see", "Invalid assertion that had previously been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir", "as a string. if isinstance(args, str): args = [args] # Putting outcap.OutputCapturer() before", "'/usr/local/cros_test']) # Ensure test ssh keys are authorized with chronos. self.assertCommandContains(['cp', '-r', '/root/.ssh/',", "--ssh_options '-F /dev/null -i /dev/null' localhost:9222\" ' accessibility_Sanity'), self.caplog.text) class CrOSTesterTast(CrOSTesterBase): \"\"\"Tests tast", "device using rsync. self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync', return_value=False) def testChromeTestSCP(self, rsync_mock):", "the LICENSE file. 
\"\"\"Unit tests for CrOSTest.\"\"\" from __future__ import print_function import os", "= { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) #", "'172.16.17.32', 'ui.ChromeLogin']) def testTastTestSDK(self): \"\"\"Verify running tast tests from the SimpleChrome SDK.\"\"\" self._tester.tast", "Specify an xbuddy link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222',", "wrapper. self._tester._RunAutotest() # Check that we enter the chroot before running test_that. self.assertIn(('cros_sdk", "\"\"\" # Recreate args as a list if it is given as a", "TempFilePath(self, file_path): \"\"\"Creates a temporary file path lasting for the duration of a", "True self._tester.cwd = '/usr/local/autotest' self._tester.args = ['./bin/vm_sanity.py'] self._tester.Run() # Ensure command runs in", "\"\"\"Verify build/deploy and chrome test commands using scp to copy.\"\"\" test_exe = 'crypto_unittests'", "True self._tester.args = ['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure target directory is created on", "tast_bin_dir = os.path.join(tast_cache_dir, 'tast-cmd/usr/bin') osutils.SafeMakedirs(tast_bin_dir) self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false', '-waituntilready', '-remoterunner=%s'", "check if /dev/kvm is writeable to use sudo. 
with mock.patch.object(os, 'access', return_value=True): tester", "a parser error for --chrome-test when no args are given.\"\"\" self.CheckParserError('--chrome-test', '--chrome-test') def", "supported for tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True self._tester.Run() # Check that", "# Check if new VM is responsive. self.assertCommandContains( ['ssh', '-p', '9222', 'root@localhost', '--',", "with an env var for the build dir, and ensure an # exception", "= '/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir', '/mnt/host/source/test_results', '--ssh_private_key', '/mnt/host/source/.ssh/testing_rsa']) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=False)", "# Check that we enter the chroot before running test_that. self.assertIn(('cros_sdk -- test_that", "Ensure files are being copied over to the device using rsync. self.assertCommandContains(['rsync', '%s/'", "\"\"\" # Ensure chrome is being built. self.assertCommandContains(['autoninja', '-C', build_dir, test_exe]) # Ensure", "particular chrome test. \"\"\" self._tester.args = [test_exe] + test_args if test_args else [test_exe]", "call when giving a cwd.\"\"\" self._tester.remote_cmd = True self._tester.cwd = '/usr/local/autotest' self._tester.args =", "/usr/local/chrome_test && su chronos -c -- ' '\"out_amd64-generic/Release/%s %s\"' % (test_exe, args)]) def", "&& /usr/local/autotest/bin/' 'vm_sanity.py' % self._tester.cwd], expected=False) self.assertCommandContains(['rm', '-rf'], expected=False) def testHostCmd(self): \"\"\"Verify running", "build_dir: The directory where chrome is built. test_args: Chrome test arguments. 
\"\"\" #", "self._tester.mount = True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self): \"\"\"Verify that results files/directories are", "directory or create a temp directory on the target. \"\"\" self._tester.remote_cmd = True", "chrome test. test_label: The label of the chrome test. build_dir: The directory where", "'/tmp/results'], 'with results-src') # Parser error if no results source is given. self.CheckParserError(['--results-dest-dir',", "opts: Cmd-line args to cros_test used to build a CrOSTest. Returns: An instance", "by the chrome test. runtime_deps = [ './%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root,", "= [test_exe] + test_args if test_args else [test_exe] self._tester.chrome_test = True self._tester.build_dir =", "launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks that autotest is running. self.assertCommandContains([ 'test_that', '--no-quickmerge', '--ssh_options',", "isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') # Add info about the specified chrome test to the", "returns the correct files.\"\"\" # Ensure FileList returns files when files_from is None.", "\"\"\"Verify running a set of tast tests with an expression.\"\"\" self._tester.tast = [", "@mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests running an autotest from within the chroot.\"\"\"", "of the chrome test. test_args: A list of arguments of the particular chrome", "self.assertCommandContains( ['ssh', '-p', '12345', 'root@localhost', '--', 'true']) def testFlash(self): \"\"\"Tests flash command.\"\"\" #", "!disabled)' ]) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot') def testTastTestWithOtherArgs(self, check_inside_chroot_mock): \"\"\"Verify running a single tast test with", "if build directory is not an existing directory. 
self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'], 'not a", "= ['accessibility_Sanity'] # Capture the run command. This is necessary beacuse the mock", "= cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def testParserErrorBuild(self): \"\"\"Verify parser errors for", "'desc', build_dir, test_label, 'runtime_deps']) # Ensure UI is stopped so the test can", "\"\"\"Tests tast test cases.\"\"\" def testSingleBaseTastTest(self): \"\"\"Verify running a single tast test.\"\"\" self._tester.tast", "the device. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity',", "= cros_test.CrOSTest(opts) tester._device.use_sudo = False tester._device.board = 'amd64-generic' tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path)", "'int32:0']) args = ' '.join(test_args) if test_args else '' # Ensure the chrome", "basic deploy chrome command.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force',", "class CrOSTesterChromeTest(CrOSTesterBase): \"\"\"Tests chrome test test cases.\"\"\" def SetUpChromeTest(self, test_exe, test_label, test_args=None): \"\"\"Sets", "being copied over to the device using rsync. self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test'])", "def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a remote command when src files are not specified.", "# Parser error if the cwd refers to a parent path. 
self.CheckParserError(['--cwd', '../new_cwd'],", "/usr/local/autotest && ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a remote command when src files", "chrome test. \"\"\" self._tester.args = [test_exe] + test_args if test_args else [test_exe] self._tester.chrome_test", "'--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0']) args = ' '.join(test_args) if test_args else '' #", "tester def setUp(self): \"\"\"Common set up method for all tests.\"\"\" self._tester = self.createTester()", "governed by a BSD-style license that can be # found in the LICENSE", "'9222', 'root@localhost', '--', 'cd /usr/local/chrome_test && su chronos -c -- ' '\"out_amd64-generic/Release/%s %s\"'", "self.rc.AddCmdResult(partial_mock.In('--version'), output=version_str) return tester def setUp(self): \"\"\"Common set up method for all tests.\"\"\"", "self.CheckParserError(['--files', '../some_file'], 'cannot start with ..') # Parser error when a non-existent file", "FileList returns files when files_from does not exist. files_from = self.TempFilePath('file_list') self.assertEqual(files, cros_test.FileList(files,", "specifying the cwd.\"\"\" # Parser error if the cwd refers to a parent", "the chrome test. test_label: The label of the chrome test. test_args: A list", "a user activity ping is sent to the device. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost',", "various arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout = 100 self._tester._device.log_level = 'debug' self._tester._device.should_start_vm =", "= '//crypto:' + test_exe test_args = ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label,", "error if using chronos without a test command. 
self.CheckParserError('--as-chronos', 'as-chronos') # Parser error", "= 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12900.0.0', } self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash',", "when files_from is None. files = ['/tmp/filename1', '/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None)) # Ensure", "been mocked. # check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] # Capture the run command. This", "'crypto' from the test_label in this instance. label_root = test_label.split(':')[0].lstrip('/') # A few", "arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.test_timeout = 100 self._tester._device.log_level = 'debug' self._tester._device.should_start_vm = False", "# Ensure command runs in the autotest directory. self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py') def", "'ui.ChromeLogin'] self._tester.Run() # Ensure command is run with an env var for the", "parser errors when using certain commands.\"\"\" # Parser error if no test command", "with additional arguments.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip = True self._tester.mount", "'--', 'true']) def testStartVMCustomPort(self): \"\"\"Verify that a custom SSH port is supported for", "tester = cros_test.CrOSTest(opts) tester._device.use_sudo = False tester._device.board = 'amd64-generic' tester._device.image_path = self.TempFilePath( 'chromiumos_qemu_image.bin')", "# Ensure FileList returns files when files_from does not exist. files_from = self.TempFilePath('file_list')", "= 'crypto_unittests' test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir)", "absolute path. 
self.CheckParserError(['--cwd', 'tmp/cwd'], 'cwd must be an absolute path') def testParserErrorFiles(self): \"\"\"Verify", "args to cros_test used to build a CrOSTest. Returns: An instance of cros_test.CrOSTest.", "flash command is skipped when not needed.\"\"\" self._tester.flash = True self._tester._device.board = 'octopus'", "properly. Args: test_exe: The name of the chrome test. test_label: The label of", "self._tester.build_dir, '--process-timeout', '180', '--device', self._tester._device.device + ':9222', '--board', 'amd64-generic', '--cache-dir', self._tester.cache_dir]) def testDeployChromeWithArgs(self):", "test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) # Ensure", "commands using scp to copy.\"\"\" test_exe = 'crypto_unittests' test_label = '//crypto:' + test_exe", "directory. self.assertCommandContains('cd /usr/local/autotest && ./bin/vm_sanity.py') def testRunDeviceCmdWithoutSrcFiles(self): \"\"\"Verify running a remote command when", "Recreate args as a list if it is given as a string. if", "specified. self.CheckParserError(['--files', 'file_list', '--files-from', 'file'], '--files and --files-from') # Parser error when --files-from", "self._tester._device.should_start_vm = False self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.results_dir = '/tmp/results' self._tester.Run()", "['crypto_unittests', '--test-launcher-print-test-stdio=always'] self._tester.Run() # Ensure target directory is created on the DUT. self.assertCommandContains(['mkdir',", "is raised. Args: args: List of commandline arguments. error_msg: Error message to check", "'/tmp/filename2'] self.assertEqual(files, cros_test.FileList(files, None)) # Ensure FileList returns files when files_from does not", "target directory is created on the DUT. 
self.assertCommandContains(['mkdir', '-p', '/usr/local/cros_test']) # Ensure test", "test command') # Parser error if using chronos without a test command. self.CheckParserError('--as-chronos',", "else [test_exe] self._tester.chrome_test = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') osutils.SafeMakedirs(self._tester.build_dir) isolate_map = self.TempFilePath('testing/buildbot/gn_isolate_map.pyl') #", "True self._tester.mount = True self._tester.Run() self.assertCommandContains(['--nostrip', '--mount']) def testFetchResults(self): \"\"\"Verify that results files/directories", "self.assertCommandContains( ['ssh', '-p', '9222', 'root@localhost', '--', 'true']) def testStartVMCustomPort(self): \"\"\"Verify that a custom", "chrome command with additional arguments.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip =", "check. with self.assertRaises(SystemExit): with outcap.OutputCapturer() as output: cros_test.ParseCommandLine(args) self.assertIn(error_msg, output.GetStderr()) def testParserErrorChromeTest(self): \"\"\"Verify", "os.path.join('/mnt/host/source', os.path.relpath(os.getcwd(), constants.SOURCE_ROOT)) test_results_dir = os.path.join(cwd, 'test_results') testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled(", "import partial_mock from chromite.scripts import cros_set_lsb_release from chromite.utils import outcap pytestmark = cros_test_lib.pytestmark_inside_only", "self.CheckParserError(['--results-dest-dir', '/tmp/dest_dir'], 'with results-dest-dir') # Parser error if results destination dir is a", "we use the custom port when talking to the VM. self.assertCommandContains( ['ssh', '-p',", "files used by the chrome test. 
runtime_deps = [ './%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps'", "we use the correct browser in guest mode.\"\"\" self._tester.catapult_tests = ['testAddResults'] self._tester.guest =", "= True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION: '12901.0.0', } self._tester.xbuddy =", "file_list = ['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from, '\\n'.join(file_list)) self.assertEqual(file_list, cros_test.FileList(files, files_from)) class CrOSTesterMiscTests(CrOSTesterBase): \"\"\"Tests", "self.assertCommandContains(['gn', 'desc', build_dir, test_label, 'runtime_deps']) # Ensure UI is stopped so the test", "error if the cwd refers to a parent path. self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot", "files.\"\"\" # Ensure FileList returns files when files_from is None. files = ['/tmp/filename1',", "self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir, '--process-timeout', '180', '--device', self._tester._device.device + ':9222', '--board', 'amd64-generic', '--cache-dir',", "ui']) # Ensure a user activity ping is sent to the device. self.assertCommandContains(['ssh',", "duration of a test.\"\"\" return os.path.join(self.tempdir, file_path) class CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility methods\"\"\"", "'crypto_unittests' test_label = '//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) #", "# check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] # Capture the run command. This is necessary", "test to the isolate map. osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": { \"label\": \"%s\", \"type\": \"console_test_launcher\",", "specified. 
The remote command should not change the working directory or create a", "error when both --files and --files-from are specified. self.CheckParserError(['--files', 'file_list', '--files-from', 'file'], '--files", "path. self.CheckParserError(['--cwd', '../new_cwd'], 'cwd cannot start with ..') # Parser error if the", "device. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'dbus-send', '--system', '--type=method_call', '--dest=org.chromium.PowerManager', '/org/chromium/PowerManager', 'org.chromium.PowerManager.HandleUserActivity', 'int32:0'])", "from the parsed options. build_dir = cros_test.ParseCommandLine( ['--chrome-test', '--', test_dir]).build_dir self.assertEqual(build_dir, os.path.dirname(test_dir)) def", "test.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester.Run() self.assertCommandContains(['tast', 'run', '-build=false', '-waituntilready', '-extrauseflags=tast_vm', 'localhost:9222', 'ui.ChromeLogin']) def", "# Ensure that --host-cmd does not invoke ssh since it runs on the", "SSH port is supported for tests.\"\"\" self._tester = self.createTester(opts=['--ssh-port=12345']) self._tester.start_vm = True self._tester.Run()", "def testBasicAutotest(self): \"\"\"Tests a simple autotest call.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.Run() # Check", "test_exe test_args = ['--test-launcher-print-test-stdio=auto'] self.SetUpChromeTest(test_exe, test_label, test_args) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args) class", "run with an env var for the build dir, and ensure an #", "the specified chrome test to the isolate map. osutils.WriteFile(isolate_map, \"\"\"{ \"%s\": { \"label\":", "command is skipped when not needed.\"\"\" self._tester.flash = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release", "create a temp directory on the target. 
\"\"\" self._tester.remote_cmd = True self._tester.args =", "errors when specifying the cwd.\"\"\" # Parser error if the cwd refers to", "'//crypto:' + test_exe self.SetUpChromeTest(test_exe, test_label) self._tester.Run() self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir) # Ensure files are", "= ['testAddResults'] self._tester.guest = True self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ])", "for all tests.\"\"\" self._tester = self.createTester() def TempFilePath(self, file_path): \"\"\"Creates a temporary file", "being copied over to the device using scp. self.assertCommandContains(['scp', '%s/' % self._tester.staging_dir, 'root@localhost:/usr/local/chrome_test'])", "deploy chrome command with additional arguments.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.nostrip", "opts=None): \"\"\"Builds a CrOSTest suitable for testing. Args: opts: Cmd-line args to cros_test", "a directory') def testParserErrorResultsSrc(self): \"\"\"Verify parser errors for results src/dest directories.\"\"\" # Parser", "disable=import-error from chromite.lib import constants from chromite.lib import cros_test from chromite.lib import cros_test_lib", "autotest test cases.\"\"\" def testBasicAutotest(self): \"\"\"Tests a simple autotest call.\"\"\" self._tester.autotest = ['accessibility_Sanity']", "tests for CrOSTest.\"\"\" from __future__ import print_function import os import sys import mock", "commands.\"\"\" # Parser error if no test command is provided. self.CheckParserError('--remote-cmd', 'specify test", "parser errors for building/deploying Chrome.\"\"\" # Parser error if no build directory is", "copied over to the device using rsync. 
self.assertCommandContains(['rsync', '%s/' % self._tester.staging_dir, '[root@localhost]:/usr/local/chrome_test']) @mock.patch('chromite.lib.remote_access.RemoteDevice.HasRsync',", "/dev/null -i /dev/null', '172.16.17.32', 'accessibility_Sanity'], dryrun=False, enter_chroot=True) @mock.patch('chromite.lib.cros_build_lib.IsInsideChroot', return_value=True) def testInsideChrootAutotest(self, _check_inside_chroot_mock): \"\"\"Tests", "['accessibility_Sanity'] self._tester.Run() # Check VM got launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Checks that autotest", "self._tester.Run() self.assertCommandContains([ os.path.join(tast_bin_dir, 'tast'), 'run', '-build=false', '-waituntilready', '-remoterunner=%s' % os.path.join(tast_bin_dir, 'remote_test_runner'), '-remotebundledir=%s' %", "a remote command when src files are not specified. The remote command should", "xbuddy link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run() self.assertCommandContains([ os.path.join(constants.CHROMITE_BIN_DIR, 'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus/R82-12901.0.0']) def", "(3, 6), 'This module requires Python 3.6+' # pylint: disable=protected-access class CrOSTesterBase(cros_test_lib.RunCommandTempDirTestCase): \"\"\"Base", "testFileList(self): \"\"\"Verify that FileList returns the correct files.\"\"\" # Ensure FileList returns files", "<filename>lib/cros_test_unittest.py # -*- coding: utf-8 -*- # Copyright 2019 The Chromium OS Authors.", "# Parser error if using chronos without a test command. self.CheckParserError('--as-chronos', 'as-chronos') #", "# Ensure target directory is removed at the end of the test. 
self.assertCommandContains(['rm',", "% filename, self._tester.results_dest_dir]) def testFileList(self): \"\"\"Verify that FileList returns the correct files.\"\"\" #", "chrome command.\"\"\" self._tester.deploy = True self._tester.build_dir = self.TempFilePath('out_amd64-generic/Release') self._tester.Run() self.assertCommandContains(['deploy_chrome', '--force', '--build-dir', self._tester.build_dir,", "check_inside_chroot_mock): \"\"\"Verify running a single tast test with various arguments.\"\"\" self._tester.tast = ['ui.ChromeLogin']", "canary. self._tester.flash = True self._tester.public_image = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = {", "self._tester.tast = [ '((\"dep:chrome\" || \"dep:android\") && !flaky && !disabled)' ] self._tester.Run() self.assertCommandContains([", "\"console_test_launcher\", } }\"\"\" % (test_exe, test_label), makedirs=True) self._tester.build = True self._tester.deploy = True", "/dev/null', 'localhost:9222', 'accessibility_Sanity']) def testAutotestWithArgs(self): \"\"\"Tests an autotest call with attributes.\"\"\" self._tester.autotest =", "cros_test from chromite.lib import cros_test_lib from chromite.lib import osutils from chromite.lib import partial_mock", "check_inside_chroot_mock.assert_called() self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir = '/mnt/host/source/test_results' self._tester._device.private_key = '/mnt/host/source/.ssh/testing_rsa' self._tester._RunAutotest() self.assertCommandContains([ '--results_dir',", "results src/dest directories.\"\"\" # Parser error if --results-src is not absolute. self.CheckParserError(['--results-src', 'tmp/results'],", "chrome test. build_dir: The directory where chrome is built. 
test_args: Chrome test arguments.", "os import sys import mock import pytest # pylint: disable=import-error from chromite.lib import", "def testAutotestWithArgs(self): \"\"\"Tests an autotest call with attributes.\"\"\" self._tester.autotest = ['accessibility_Sanity'] self._tester.results_dir =", "'--', 'stop ui']) # Ensure a user activity ping is sent to the", "self.CheckChromeTestCommands(test_exe, test_label, self._tester.build_dir, test_args) class CrOSTesterParser(CrOSTesterBase): \"\"\"Tests parser test cases.\"\"\" def CheckParserError(self, args,", "'cros'), 'flash', 'ssh://localhost:9222', 'xbuddy://remote/octopus-full/latest',]) # Specify an xbuddy link. self._tester.xbuddy = 'xbuddy://remote/octopus/R82-12901.0.0' self._tester.Run()", "ownership of the directory. self.assertCommandContains(['chown', '-R', 'chronos:', '/usr/local/cros_test']) # Ensure command runs in", "testRunDeviceCmdWithSetCwd(self): \"\"\"Verify a run device command call when giving a cwd.\"\"\" self._tester.remote_cmd =", "self._tester._device.ssh_port = None self._tester._device.device = '172.16.17.32' self._tester.results_dir = '/tmp/results' self._tester.Run() check_inside_chroot_mock.assert_called() self.assertCommandContains(['tast', '-verbose',", "def testStartVMCustomPort(self): \"\"\"Verify that a custom SSH port is supported for tests.\"\"\" self._tester", "# found in the LICENSE file. \"\"\"Unit tests for CrOSTest.\"\"\" from __future__ import", "# Use of this source code is governed by a BSD-style license that", "runtime_deps = [ './%s' % test_exe, 'gen.runtime/%s/%s/%s.runtime_deps' % (label_root, test_exe, test_exe), '../../third_party/chromite'] #", "does not invoke ssh since it runs on the host. self.assertCommandContains(['ssh', 'tast'], expected=False)", "launched. self.assertCommandContains([self._tester._device.qemu_path, '-enable-kvm']) # Check if new VM is responsive. self.assertCommandContains( ['ssh', '-p',", "writeable to use sudo. 
with mock.patch.object(os, 'access', return_value=True): tester = cros_test.CrOSTest(opts) tester._device.use_sudo =", "self.assertEqual(files, cros_test.FileList(files, None)) # Ensure FileList returns files when files_from does not exist.", "CrOSTester(CrOSTesterBase): \"\"\"Tests miscellaneous utility methods\"\"\" def testStartVM(self): \"\"\"Verify that a new VM is", "check=False, dryrun=False, extra_env={'CHROMIUM_OUTPUT_DIR': '/some/chromium/dir'}) # Ensure that --host-cmd does not invoke ssh since", "since it runs on the host. self.assertCommandContains(['ssh', 'tast'], expected=False) @pytest.mark.usefixtures('testcase_caplog') class CrOSTesterAutotest(CrOSTesterBase): \"\"\"Tests", "Parser error if build directory is not an existing directory. self.CheckParserError(['--deploy', '--build-dir', '/not/a/directory'],", "List of commandline arguments. error_msg: Error message to check for. \"\"\" # Recreate", "command call when giving a cwd.\"\"\" self._tester.remote_cmd = True self._tester.cwd = '/usr/local/autotest' self._tester.args", "a temp file path.\"\"\" def createTester(self, opts=None): \"\"\"Builds a CrOSTest suitable for testing.", "'testAddResults' ]) def testCatapultAsGuest(self): \"\"\"Verify that we use the correct browser in guest", "Parser error if there are args, but no command. self.CheckParserError('--some_test some_command', '--remote-cmd or", "testTastTestSDK(self): \"\"\"Verify running tast tests from the SimpleChrome SDK.\"\"\" self._tester.tast = ['ui.ChromeLogin'] self._tester._device.private_key", "True self._tester.Run() self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system-guest', 'testAddResults' ]) def testRunDeviceCmd(self): \"\"\"Verify a", "the GPU if needed. self.assertCommandContains(['ssh', '-p', '9222', 'root@localhost', '--', 'stop ui']) # Ensure", "Ensure FileList uses 'files_from' and ignores 'files'. 
file_list = ['/tmp/file1', '/tmp/file2', '/tmp/file3'] osutils.WriteFile(files_from,", "testing_rsa_dir = os.path.join(cwd, '.ssh/testing_rsa') self._tester._RunAutotest() self.assertCommandCalled( ['test_that', '--board', 'amd64-generic', '--results_dir', test_results_dir, '--ssh_private_key', testing_rsa_dir,", "self.assertCommandContains([ 'python', '/usr/local/telemetry/src/third_party/catapult/' 'telemetry/bin/run_tests', '--browser=system', 'testAddResults' ]) def testCatapultAsGuest(self): \"\"\"Verify that we use", "# Copyright 2019 The Chromium OS Authors. All rights reserved. # Use of", "test_exe: The name of the chrome test. test_label: The label of the chrome", "self.TempFilePath( 'chromiumos_qemu_image.bin') osutils.Touch(tester._device.image_path) version_str = ('QEMU emulator version 2.6.0, Copyright (c) ' '2003-2008", "= ['tast', 'run', 'localhost:9222', 'ui.ChromeLogin'] self._tester.Run() # Ensure command is run with an", "when not needed.\"\"\" self._tester.flash = True self._tester._device.board = 'octopus' self._tester._device.remote._lsb_release = { cros_set_lsb_release.LSB_KEY_VERSION:" ]
[ "pred, args): mask = (GT < args.maxdisp) & (GT >= 0) # print(mask.size(),", "if count == 0: count = 1 return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2) +", "count == 0: count = 1 return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2) + 4)", "= 1 return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2) + 4) /2 - 1) /", "import torch def GERF_loss(GT, pred, args): mask = (GT < args.maxdisp) & (GT", "count = len(torch.nonzero(mask)) # print(count) if count == 0: count = 1 return", "args.maxdisp) & (GT >= 0) # print(mask.size(), GT.size(), pred.size()) count = len(torch.nonzero(mask)) #", "args): mask = (GT < args.maxdisp) & (GT >= 0) # print(mask.size(), GT.size(),", "def GERF_loss(GT, pred, args): mask = (GT < args.maxdisp) & (GT >= 0)", "== 0: count = 1 return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2) + 4) /2", "# print(mask.size(), GT.size(), pred.size()) count = len(torch.nonzero(mask)) # print(count) if count == 0:", "= (GT < args.maxdisp) & (GT >= 0) # print(mask.size(), GT.size(), pred.size()) count", ">= 0) # print(mask.size(), GT.size(), pred.size()) count = len(torch.nonzero(mask)) # print(count) if count", "0: count = 1 return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2) + 4) /2 -", "count = 1 return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2) + 4) /2 - 1)", "< args.maxdisp) & (GT >= 0) # print(mask.size(), GT.size(), pred.size()) count = len(torch.nonzero(mask))", "# print(count) if count == 0: count = 1 return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask],", "mask = (GT < args.maxdisp) & (GT >= 0) # print(mask.size(), GT.size(), pred.size())", "(GT < args.maxdisp) & (GT >= 0) # print(mask.size(), GT.size(), pred.size()) count =", "print(mask.size(), GT.size(), pred.size()) count = len(torch.nonzero(mask)) # print(count) if count == 0: count", "pred.size()) count = len(torch.nonzero(mask)) # print(count) if count == 0: count = 1", "0) # print(mask.size(), GT.size(), pred.size()) 
count = len(torch.nonzero(mask)) # print(count) if count ==", "len(torch.nonzero(mask)) # print(count) if count == 0: count = 1 return torch.sum(torch.sqrt(torch.pow(GT[mask] -", "1 return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2) + 4) /2 - 1) / count", "GERF_loss(GT, pred, args): mask = (GT < args.maxdisp) & (GT >= 0) #", "torch def GERF_loss(GT, pred, args): mask = (GT < args.maxdisp) & (GT >=", "(GT >= 0) # print(mask.size(), GT.size(), pred.size()) count = len(torch.nonzero(mask)) # print(count) if", "import os import torch def GERF_loss(GT, pred, args): mask = (GT < args.maxdisp)", "& (GT >= 0) # print(mask.size(), GT.size(), pred.size()) count = len(torch.nonzero(mask)) # print(count)", "GT.size(), pred.size()) count = len(torch.nonzero(mask)) # print(count) if count == 0: count =", "print(count) if count == 0: count = 1 return torch.sum(torch.sqrt(torch.pow(GT[mask] - pred[mask], 2)", "= len(torch.nonzero(mask)) # print(count) if count == 0: count = 1 return torch.sum(torch.sqrt(torch.pow(GT[mask]", "os import torch def GERF_loss(GT, pred, args): mask = (GT < args.maxdisp) &" ]
[ "import inspect import os def get_datasets_folder(): return os.path.join(get_data_folder(), \"Datasets\") def get_data_folder(): return os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))" ]
[ "@Comment: # ====================================================== import torch def default_collate(batch): elem = batch[0] elem_type = type(elem)", "<NAME> # @Email : # @File : collate.py # @Comment: # ====================================================== import", "# @Time : 20-12-26 下午4:42 # @Author : <NAME> # @Email : #", "====================================================== import torch def default_collate(batch): elem = batch[0] elem_type = type(elem) if isinstance(elem,", "type(elem) if isinstance(elem, torch.Tensor): return torch.stack(batch, 0) elif elem_type.__module__ == 'numpy': return default_collate([torch.as_tensor(b)", "@Time : 20-12-26 下午4:42 # @Author : <NAME> # @Email : # @File", "# @Author : <NAME> # @Email : # @File : collate.py # @Comment:", "import torch def default_collate(batch): elem = batch[0] elem_type = type(elem) if isinstance(elem, torch.Tensor):", "-*- coding: utf-8 -*- # ====================================================== # @Time : 20-12-26 下午4:42 # @Author", "default_collate(batch): elem = batch[0] elem_type = type(elem) if isinstance(elem, torch.Tensor): return torch.stack(batch, 0)", "0) elif elem_type.__module__ == 'numpy': return default_collate([torch.as_tensor(b) for b in batch]) else: raise", "return torch.stack(batch, 0) elif elem_type.__module__ == 'numpy': return default_collate([torch.as_tensor(b) for b in batch])", "coding: utf-8 -*- # ====================================================== # @Time : 20-12-26 下午4:42 # @Author :", "-*- # ====================================================== # @Time : 20-12-26 下午4:42 # @Author : <NAME> #", ": <NAME> # @Email : # @File : collate.py # @Comment: # ======================================================", "def default_collate(batch): elem = batch[0] elem_type = type(elem) if isinstance(elem, torch.Tensor): return torch.stack(batch,", "torch.Tensor): return torch.stack(batch, 0) elif elem_type.__module__ == 'numpy': return default_collate([torch.as_tensor(b) for b in", 
"if isinstance(elem, torch.Tensor): return torch.stack(batch, 0) elif elem_type.__module__ == 'numpy': return default_collate([torch.as_tensor(b) for", ": collate.py # @Comment: # ====================================================== import torch def default_collate(batch): elem = batch[0]", "20-12-26 下午4:42 # @Author : <NAME> # @Email : # @File : collate.py", "torch.stack(batch, 0) elif elem_type.__module__ == 'numpy': return default_collate([torch.as_tensor(b) for b in batch]) else:", "# @File : collate.py # @Comment: # ====================================================== import torch def default_collate(batch): elem", "@File : collate.py # @Comment: # ====================================================== import torch def default_collate(batch): elem =", "# ====================================================== # @Time : 20-12-26 下午4:42 # @Author : <NAME> # @Email", "# @Comment: # ====================================================== import torch def default_collate(batch): elem = batch[0] elem_type =", "isinstance(elem, torch.Tensor): return torch.stack(batch, 0) elif elem_type.__module__ == 'numpy': return default_collate([torch.as_tensor(b) for b", "= batch[0] elem_type = type(elem) if isinstance(elem, torch.Tensor): return torch.stack(batch, 0) elif elem_type.__module__", "batch[0] elem_type = type(elem) if isinstance(elem, torch.Tensor): return torch.stack(batch, 0) elif elem_type.__module__ ==", "torch def default_collate(batch): elem = batch[0] elem_type = type(elem) if isinstance(elem, torch.Tensor): return", "utf-8 -*- # ====================================================== # @Time : 20-12-26 下午4:42 # @Author : <NAME>", "<reponame>hhaAndroid/miniloader<filename>libv1/collate.py # -*- coding: utf-8 -*- # ====================================================== # @Time : 20-12-26 下午4:42", "= type(elem) if isinstance(elem, torch.Tensor): return torch.stack(batch, 0) elif elem_type.__module__ == 'numpy': return", "# @Email : # @File : collate.py # 
@Comment: # ====================================================== import torch", "elif elem_type.__module__ == 'numpy': return default_collate([torch.as_tensor(b) for b in batch]) else: raise NotImplementedError", "====================================================== # @Time : 20-12-26 下午4:42 # @Author : <NAME> # @Email :", ": # @File : collate.py # @Comment: # ====================================================== import torch def default_collate(batch):", "elem_type = type(elem) if isinstance(elem, torch.Tensor): return torch.stack(batch, 0) elif elem_type.__module__ == 'numpy':", "下午4:42 # @Author : <NAME> # @Email : # @File : collate.py #", "# ====================================================== import torch def default_collate(batch): elem = batch[0] elem_type = type(elem) if", "# -*- coding: utf-8 -*- # ====================================================== # @Time : 20-12-26 下午4:42 #", "@Email : # @File : collate.py # @Comment: # ====================================================== import torch def", ": 20-12-26 下午4:42 # @Author : <NAME> # @Email : # @File :", "elem = batch[0] elem_type = type(elem) if isinstance(elem, torch.Tensor): return torch.stack(batch, 0) elif", "@Author : <NAME> # @Email : # @File : collate.py # @Comment: #", "collate.py # @Comment: # ====================================================== import torch def default_collate(batch): elem = batch[0] elem_type" ]
[ "a == b]) return 0 def solve(data): rows = data.splitlines() return _solve_1(rows), _solve_2(rows)", "in d]) def _solve_2(rows): for i, r in enumerate(rows): for r2 in rows[i:]:", "data.splitlines() return _solve_1(rows), _solve_2(rows) if __name__ == '__main__': from AOC2018 import run_solver run_solver(solve,", "for x in d]) def _solve_2(rows): for i, r in enumerate(rows): for r2", "rows[i:]: diffs = len(r) - int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r))) if diffs == 1:", "1: return \"\".join([a for a, b in zip(r, r2) if a == b])", "* sum([x[1] for x in d]) def _solve_2(rows): for i, r in enumerate(rows):", "\"\".join([a for a, b in zip(r, r2) if a == b]) return 0", "- int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r))) if diffs == 1: return \"\".join([a for a,", "2 for x in counter.values()])), int(any([x == 3 for x in counter.values()])) def", "difflib def _checksum(r): counter = Counter(r) return int(any([x == 2 for x in", "in zip(r, r2) if a == b]) return 0 def solve(data): rows =", "int(any([x == 3 for x in counter.values()])) def _solve_1(rows): d = [_checksum(row) for", "== b]) return 0 def solve(data): rows = data.splitlines() return _solve_1(rows), _solve_2(rows) if", "0 def solve(data): rows = data.splitlines() return _solve_1(rows), _solve_2(rows) if __name__ == '__main__':", "def _checksum(r): counter = Counter(r) return int(any([x == 2 for x in counter.values()])),", "import Counter import difflib def _checksum(r): counter = Counter(r) return int(any([x == 2", "r2 in rows[i:]: diffs = len(r) - int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r))) if diffs", "zip(r, r2) if a == b]) return 0 def solve(data): rows = data.splitlines()", "x in counter.values()])) def _solve_1(rows): d = [_checksum(row) for row in rows] return", "sum([x[0] for x in d]) * sum([x[1] for x in d]) def _solve_2(rows):", "d]) def _solve_2(rows): for i, r in enumerate(rows): for r2 in rows[i:]: diffs", "sum([x[1] for x in d]) def 
_solve_2(rows): for i, r in enumerate(rows): for", "_solve_1(rows): d = [_checksum(row) for row in rows] return sum([x[0] for x in", "enumerate(rows): for r2 in rows[i:]: diffs = len(r) - int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r)))", "for r2 in rows[i:]: diffs = len(r) - int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r))) if", "for i, r in enumerate(rows): for r2 in rows[i:]: diffs = len(r) -", "[_checksum(row) for row in rows] return sum([x[0] for x in d]) * sum([x[1]", "return \"\".join([a for a, b in zip(r, r2) if a == b]) return", "for x in d]) * sum([x[1] for x in d]) def _solve_2(rows): for", "in rows] return sum([x[0] for x in d]) * sum([x[1] for x in", "r in enumerate(rows): for r2 in rows[i:]: diffs = len(r) - int(round(difflib.SequenceMatcher(a=r, b=r2).ratio()", "b in zip(r, r2) if a == b]) return 0 def solve(data): rows", "== 3 for x in counter.values()])) def _solve_1(rows): d = [_checksum(row) for row", "rows] return sum([x[0] for x in d]) * sum([x[1] for x in d])", "_checksum(r): counter = Counter(r) return int(any([x == 2 for x in counter.values()])), int(any([x", "for x in counter.values()])) def _solve_1(rows): d = [_checksum(row) for row in rows]", "== 2 for x in counter.values()])), int(any([x == 3 for x in counter.values()]))", "int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r))) if diffs == 1: return \"\".join([a for a, b", "diffs == 1: return \"\".join([a for a, b in zip(r, r2) if a", "def _solve_2(rows): for i, r in enumerate(rows): for r2 in rows[i:]: diffs =", "in counter.values()])), int(any([x == 3 for x in counter.values()])) def _solve_1(rows): d =", "in d]) * sum([x[1] for x in d]) def _solve_2(rows): for i, r", "== 1: return \"\".join([a for a, b in zip(r, r2) if a ==", "return 0 def solve(data): rows = data.splitlines() return _solve_1(rows), _solve_2(rows) if __name__ ==", "x in d]) def _solve_2(rows): for i, r in enumerate(rows): for r2 in", "b]) return 0 def solve(data): rows = 
data.splitlines() return _solve_1(rows), _solve_2(rows) if __name__", "collections import Counter import difflib def _checksum(r): counter = Counter(r) return int(any([x ==", "len(r) - int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r))) if diffs == 1: return \"\".join([a for", "rows = data.splitlines() return _solve_1(rows), _solve_2(rows) if __name__ == '__main__': from AOC2018 import", "for x in counter.values()])), int(any([x == 3 for x in counter.values()])) def _solve_1(rows):", "def _solve_1(rows): d = [_checksum(row) for row in rows] return sum([x[0] for x", "counter.values()])) def _solve_1(rows): d = [_checksum(row) for row in rows] return sum([x[0] for", "from collections import Counter import difflib def _checksum(r): counter = Counter(r) return int(any([x", "len(r))) if diffs == 1: return \"\".join([a for a, b in zip(r, r2)", "x in counter.values()])), int(any([x == 3 for x in counter.values()])) def _solve_1(rows): d", "Counter import difflib def _checksum(r): counter = Counter(r) return int(any([x == 2 for", "3 for x in counter.values()])) def _solve_1(rows): d = [_checksum(row) for row in", "= [_checksum(row) for row in rows] return sum([x[0] for x in d]) *", "in enumerate(rows): for r2 in rows[i:]: diffs = len(r) - int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() *", "= Counter(r) return int(any([x == 2 for x in counter.values()])), int(any([x == 3", "if diffs == 1: return \"\".join([a for a, b in zip(r, r2) if", "return _solve_1(rows), _solve_2(rows) if __name__ == '__main__': from AOC2018 import run_solver run_solver(solve, __file__)", "if a == b]) return 0 def solve(data): rows = data.splitlines() return _solve_1(rows),", "for row in rows] return sum([x[0] for x in d]) * sum([x[1] for", "counter.values()])), int(any([x == 3 for x in counter.values()])) def _solve_1(rows): d = [_checksum(row)", "def solve(data): rows = data.splitlines() return _solve_1(rows), _solve_2(rows) if __name__ == '__main__': from", "= len(r) - 
int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r))) if diffs == 1: return \"\".join([a", "* len(r))) if diffs == 1: return \"\".join([a for a, b in zip(r,", "import difflib def _checksum(r): counter = Counter(r) return int(any([x == 2 for x", "return sum([x[0] for x in d]) * sum([x[1] for x in d]) def", "return int(any([x == 2 for x in counter.values()])), int(any([x == 3 for x", "in rows[i:]: diffs = len(r) - int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r))) if diffs ==", "x in d]) * sum([x[1] for x in d]) def _solve_2(rows): for i,", "a, b in zip(r, r2) if a == b]) return 0 def solve(data):", "d = [_checksum(row) for row in rows] return sum([x[0] for x in d])", "= data.splitlines() return _solve_1(rows), _solve_2(rows) if __name__ == '__main__': from AOC2018 import run_solver", "diffs = len(r) - int(round(difflib.SequenceMatcher(a=r, b=r2).ratio() * len(r))) if diffs == 1: return", "i, r in enumerate(rows): for r2 in rows[i:]: diffs = len(r) - int(round(difflib.SequenceMatcher(a=r,", "r2) if a == b]) return 0 def solve(data): rows = data.splitlines() return", "b=r2).ratio() * len(r))) if diffs == 1: return \"\".join([a for a, b in", "row in rows] return sum([x[0] for x in d]) * sum([x[1] for x", "int(any([x == 2 for x in counter.values()])), int(any([x == 3 for x in", "solve(data): rows = data.splitlines() return _solve_1(rows), _solve_2(rows) if __name__ == '__main__': from AOC2018", "_solve_2(rows): for i, r in enumerate(rows): for r2 in rows[i:]: diffs = len(r)", "in counter.values()])) def _solve_1(rows): d = [_checksum(row) for row in rows] return sum([x[0]", "for a, b in zip(r, r2) if a == b]) return 0 def", "Counter(r) return int(any([x == 2 for x in counter.values()])), int(any([x == 3 for", "counter = Counter(r) return int(any([x == 2 for x in counter.values()])), int(any([x ==", "d]) * sum([x[1] for x in d]) def _solve_2(rows): for i, r in" ]
[ "self.lock = lock or asyncio.Lock(loop=self.loop) self.last_request_time = 0 @property def state(self): return self.proto.state", "\"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi: def __init__(self, username, password, state=None, delay=5, proxy=None, loop=None, lock=None):", "loop or asyncio.get_event_loop() self.lock = lock or asyncio.Lock(loop=self.loop) self.last_request_time = 0 @property def", "loop=None, lock=None): if proxy is None: self._conn = None else: self._conn = aiohttp.ProxyConnector(proxy=proxy)", "raise InstagramError(response) return Protocol.Response( cookies={c.key: c.value for c in session.cookie_jar}, json=await response.json(), status_code=response.status,", "None else: self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username, password, state) self.delay = delay", "wrapper async def _request(self, request): kw = request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session:", "return wrapper async def _request(self, request): kw = request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as", "AioHTTPInstagramApi: def __init__(self, username, password, state=None, delay=5, proxy=None, loop=None, lock=None): if proxy is", "not await response.read(): raise InstagramError(response) return Protocol.Response( cookies={c.key: c.value for c in session.cookie_jar},", "self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop) self.last_request_time = self.loop.time() response = await self._request(request) return response.json", "if proxy is None: self._conn = None else: self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto =", "self.delay - (now - self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop) self.last_request_time = self.loop.time() response =", "asyncio import functools import contextlib import aiohttp from ..protocol import Protocol from ..exceptions", "asyncio.Lock(loop=self.loop) self.last_request_time = 0 @property def 
state(self): return self.proto.state def __getattr__(self, name): method", "async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async with session.request(**kw) as response: if not await", "import Protocol from ..exceptions import InstagramError __all__ = ( \"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi:", "return self._run(method(*args, **kwargs)) return wrapper async def _request(self, request): kw = request._asdict() async", "password, state=None, delay=5, proxy=None, loop=None, lock=None): if proxy is None: self._conn = None", "session.request(**kw) as response: if not await response.read(): raise InstagramError(response) return Protocol.Response( cookies={c.key: c.value", "state) self.delay = delay self.loop = loop or asyncio.get_event_loop() self.lock = lock or", "import asyncio import functools import contextlib import aiohttp from ..protocol import Protocol from", "= Protocol(username, password, state) self.delay = delay self.loop = loop or asyncio.get_event_loop() self.lock", "( \"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi: def __init__(self, username, password, state=None, delay=5, proxy=None, loop=None,", "while True: request = generator.send(response) now = self.loop.time() timeout = max(0, self.delay -", "None: self._conn = None else: self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username, password, state)", "state(self): return self.proto.state def __getattr__(self, name): method = getattr(self.proto, name) @functools.wraps(method) def wrapper(*args,", "async with session.request(**kw) as response: if not await response.read(): raise InstagramError(response) return Protocol.Response(", "(now - self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop) self.last_request_time = self.loop.time() response = await self._request(request)", "self.loop = loop or asyncio.get_event_loop() self.lock = lock or asyncio.Lock(loop=self.loop) self.last_request_time = 0", "delay=5, proxy=None, 
loop=None, lock=None): if proxy is None: self._conn = None else: self._conn", "getattr(self.proto, name) @functools.wraps(method) def wrapper(*args, **kwargs): return self._run(method(*args, **kwargs)) return wrapper async def", "delay self.loop = loop or asyncio.get_event_loop() self.lock = lock or asyncio.Lock(loop=self.loop) self.last_request_time =", "(await self.lock): response = None with contextlib.suppress(StopIteration): while True: request = generator.send(response) now", "c.value for c in session.cookie_jar}, json=await response.json(), status_code=response.status, ) async def _run(self, generator):", "contextlib import aiohttp from ..protocol import Protocol from ..exceptions import InstagramError __all__ =", "self.delay = delay self.loop = loop or asyncio.get_event_loop() self.lock = lock or asyncio.Lock(loop=self.loop)", "def _run(self, generator): with (await self.lock): response = None with contextlib.suppress(StopIteration): while True:", "in session.cookie_jar}, json=await response.json(), status_code=response.status, ) async def _run(self, generator): with (await self.lock):", "async def _request(self, request): kw = request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async", "lock or asyncio.Lock(loop=self.loop) self.last_request_time = 0 @property def state(self): return self.proto.state def __getattr__(self,", "with session.request(**kw) as response: if not await response.read(): raise InstagramError(response) return Protocol.Response( cookies={c.key:", ") class AioHTTPInstagramApi: def __init__(self, username, password, state=None, delay=5, proxy=None, loop=None, lock=None): if", "self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username, password, state) self.delay = delay self.loop =", "_run(self, generator): with (await self.lock): response = None with contextlib.suppress(StopIteration): while True: request", "generator): with (await self.lock): response = None with 
contextlib.suppress(StopIteration): while True: request =", "__init__(self, username, password, state=None, delay=5, proxy=None, loop=None, lock=None): if proxy is None: self._conn", "- self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop) self.last_request_time = self.loop.time() response = await self._request(request) return", "is None: self._conn = None else: self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username, password,", "request): kw = request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async with session.request(**kw) as", "max(0, self.delay - (now - self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop) self.last_request_time = self.loop.time() response", "self.proto.state def __getattr__(self, name): method = getattr(self.proto, name) @functools.wraps(method) def wrapper(*args, **kwargs): return", "state=None, delay=5, proxy=None, loop=None, lock=None): if proxy is None: self._conn = None else:", "or asyncio.get_event_loop() self.lock = lock or asyncio.Lock(loop=self.loop) self.last_request_time = 0 @property def state(self):", "= request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async with session.request(**kw) as response: if", ") async def _run(self, generator): with (await self.lock): response = None with contextlib.suppress(StopIteration):", "= self.loop.time() timeout = max(0, self.delay - (now - self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop)", "import functools import contextlib import aiohttp from ..protocol import Protocol from ..exceptions import", "= 0 @property def state(self): return self.proto.state def __getattr__(self, name): method = getattr(self.proto,", "method = getattr(self.proto, name) @functools.wraps(method) def wrapper(*args, **kwargs): return self._run(method(*args, **kwargs)) return wrapper", "generator.send(response) now = self.loop.time() timeout = max(0, self.delay 
- (now - self.last_request_time)) await", "= max(0, self.delay - (now - self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop) self.last_request_time = self.loop.time()", "= None with contextlib.suppress(StopIteration): while True: request = generator.send(response) now = self.loop.time() timeout", "session.cookie_jar}, json=await response.json(), status_code=response.status, ) async def _run(self, generator): with (await self.lock): response", "response: if not await response.read(): raise InstagramError(response) return Protocol.Response( cookies={c.key: c.value for c", "response.read(): raise InstagramError(response) return Protocol.Response( cookies={c.key: c.value for c in session.cookie_jar}, json=await response.json(),", "def __getattr__(self, name): method = getattr(self.proto, name) @functools.wraps(method) def wrapper(*args, **kwargs): return self._run(method(*args,", "def __init__(self, username, password, state=None, delay=5, proxy=None, loop=None, lock=None): if proxy is None:", "= generator.send(response) now = self.loop.time() timeout = max(0, self.delay - (now - self.last_request_time))", "InstagramError(response) return Protocol.Response( cookies={c.key: c.value for c in session.cookie_jar}, json=await response.json(), status_code=response.status, )", "self._conn = None else: self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username, password, state) self.delay", "proxy=None, loop=None, lock=None): if proxy is None: self._conn = None else: self._conn =", "as response: if not await response.read(): raise InstagramError(response) return Protocol.Response( cookies={c.key: c.value for", "aiohttp from ..protocol import Protocol from ..exceptions import InstagramError __all__ = ( \"AioHTTPInstagramApi\",", "__getattr__(self, name): method = getattr(self.proto, name) @functools.wraps(method) def wrapper(*args, **kwargs): return self._run(method(*args, **kwargs))", "from ..exceptions import InstagramError __all__ = ( 
\"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi: def __init__(self,", "contextlib.suppress(StopIteration): while True: request = generator.send(response) now = self.loop.time() timeout = max(0, self.delay", "response = None with contextlib.suppress(StopIteration): while True: request = generator.send(response) now = self.loop.time()", "c in session.cookie_jar}, json=await response.json(), status_code=response.status, ) async def _run(self, generator): with (await", "await response.read(): raise InstagramError(response) return Protocol.Response( cookies={c.key: c.value for c in session.cookie_jar}, json=await", "if not await response.read(): raise InstagramError(response) return Protocol.Response( cookies={c.key: c.value for c in", "None with contextlib.suppress(StopIteration): while True: request = generator.send(response) now = self.loop.time() timeout =", "username, password, state=None, delay=5, proxy=None, loop=None, lock=None): if proxy is None: self._conn =", "self.last_request_time = 0 @property def state(self): return self.proto.state def __getattr__(self, name): method =", "self._run(method(*args, **kwargs)) return wrapper async def _request(self, request): kw = request._asdict() async with", "Protocol.Response( cookies={c.key: c.value for c in session.cookie_jar}, json=await response.json(), status_code=response.status, ) async def", "wrapper(*args, **kwargs): return self._run(method(*args, **kwargs)) return wrapper async def _request(self, request): kw =", "with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async with session.request(**kw) as response: if not await response.read():", "timeout = max(0, self.delay - (now - self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop) self.last_request_time =", "= ( \"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi: def __init__(self, username, password, state=None, delay=5, proxy=None,", "response.json(), status_code=response.status, ) async def _run(self, generator): with 
(await self.lock): response = None", "__all__ = ( \"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi: def __init__(self, username, password, state=None, delay=5,", "= loop or asyncio.get_event_loop() self.lock = lock or asyncio.Lock(loop=self.loop) self.last_request_time = 0 @property", "with (await self.lock): response = None with contextlib.suppress(StopIteration): while True: request = generator.send(response)", "..exceptions import InstagramError __all__ = ( \"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi: def __init__(self, username,", "name): method = getattr(self.proto, name) @functools.wraps(method) def wrapper(*args, **kwargs): return self._run(method(*args, **kwargs)) return", "def _request(self, request): kw = request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async with", "Protocol from ..exceptions import InstagramError __all__ = ( \"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi: def", "def state(self): return self.proto.state def __getattr__(self, name): method = getattr(self.proto, name) @functools.wraps(method) def", "asyncio.get_event_loop() self.lock = lock or asyncio.Lock(loop=self.loop) self.last_request_time = 0 @property def state(self): return", "self.lock): response = None with contextlib.suppress(StopIteration): while True: request = generator.send(response) now =", "session: async with session.request(**kw) as response: if not await response.read(): raise InstagramError(response) return", "= aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username, password, state) self.delay = delay self.loop = loop", "0 @property def state(self): return self.proto.state def __getattr__(self, name): method = getattr(self.proto, name)", "json=await response.json(), status_code=response.status, ) async def _run(self, generator): with (await self.lock): response =", "from ..protocol import Protocol from ..exceptions import InstagramError __all__ = ( \"AioHTTPInstagramApi\", )", "proxy is None: 
self._conn = None else: self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username,", "else: self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username, password, state) self.delay = delay self.loop", "kw = request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async with session.request(**kw) as response:", "@functools.wraps(method) def wrapper(*args, **kwargs): return self._run(method(*args, **kwargs)) return wrapper async def _request(self, request):", "class AioHTTPInstagramApi: def __init__(self, username, password, state=None, delay=5, proxy=None, loop=None, lock=None): if proxy", "import aiohttp from ..protocol import Protocol from ..exceptions import InstagramError __all__ = (", "**kwargs)) return wrapper async def _request(self, request): kw = request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\"))", "cookies={c.key: c.value for c in session.cookie_jar}, json=await response.json(), status_code=response.status, ) async def _run(self,", "import contextlib import aiohttp from ..protocol import Protocol from ..exceptions import InstagramError __all__", "**kwargs): return self._run(method(*args, **kwargs)) return wrapper async def _request(self, request): kw = request._asdict()", "self.proto = Protocol(username, password, state) self.delay = delay self.loop = loop or asyncio.get_event_loop()", "request = generator.send(response) now = self.loop.time() timeout = max(0, self.delay - (now -", "Protocol(username, password, state) self.delay = delay self.loop = loop or asyncio.get_event_loop() self.lock =", "- (now - self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop) self.last_request_time = self.loop.time() response = await", "def wrapper(*args, **kwargs): return self._run(method(*args, **kwargs)) return wrapper async def _request(self, request): kw", "= getattr(self.proto, name) @functools.wraps(method) def wrapper(*args, **kwargs): return 
self._run(method(*args, **kwargs)) return wrapper async", "return self.proto.state def __getattr__(self, name): method = getattr(self.proto, name) @functools.wraps(method) def wrapper(*args, **kwargs):", "= None else: self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username, password, state) self.delay =", "async def _run(self, generator): with (await self.lock): response = None with contextlib.suppress(StopIteration): while", "now = self.loop.time() timeout = max(0, self.delay - (now - self.last_request_time)) await asyncio.sleep(timeout,", "lock=None): if proxy is None: self._conn = None else: self._conn = aiohttp.ProxyConnector(proxy=proxy) self.proto", "= delay self.loop = loop or asyncio.get_event_loop() self.lock = lock or asyncio.Lock(loop=self.loop) self.last_request_time", "aiohttp.ProxyConnector(proxy=proxy) self.proto = Protocol(username, password, state) self.delay = delay self.loop = loop or", "@property def state(self): return self.proto.state def __getattr__(self, name): method = getattr(self.proto, name) @functools.wraps(method)", "aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async with session.request(**kw) as response: if not await response.read(): raise", "return Protocol.Response( cookies={c.key: c.value for c in session.cookie_jar}, json=await response.json(), status_code=response.status, ) async", "as session: async with session.request(**kw) as response: if not await response.read(): raise InstagramError(response)", "..protocol import Protocol from ..exceptions import InstagramError __all__ = ( \"AioHTTPInstagramApi\", ) class", "password, state) self.delay = delay self.loop = loop or asyncio.get_event_loop() self.lock = lock", "for c in session.cookie_jar}, json=await response.json(), status_code=response.status, ) async def _run(self, generator): with", "_request(self, request): kw = request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async with 
session.request(**kw)", "status_code=response.status, ) async def _run(self, generator): with (await self.lock): response = None with", "= lock or asyncio.Lock(loop=self.loop) self.last_request_time = 0 @property def state(self): return self.proto.state def", "functools import contextlib import aiohttp from ..protocol import Protocol from ..exceptions import InstagramError", "with contextlib.suppress(StopIteration): while True: request = generator.send(response) now = self.loop.time() timeout = max(0,", "request._asdict() async with aiohttp.ClientSession(cookies=kw.pop(\"cookies\")) as session: async with session.request(**kw) as response: if not", "InstagramError __all__ = ( \"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi: def __init__(self, username, password, state=None,", "or asyncio.Lock(loop=self.loop) self.last_request_time = 0 @property def state(self): return self.proto.state def __getattr__(self, name):", "import InstagramError __all__ = ( \"AioHTTPInstagramApi\", ) class AioHTTPInstagramApi: def __init__(self, username, password,", "True: request = generator.send(response) now = self.loop.time() timeout = max(0, self.delay - (now", "self.loop.time() timeout = max(0, self.delay - (now - self.last_request_time)) await asyncio.sleep(timeout, loop=self.loop) self.last_request_time", "name) @functools.wraps(method) def wrapper(*args, **kwargs): return self._run(method(*args, **kwargs)) return wrapper async def _request(self," ]
[ "str): context['error'] = error else: raise TypeError('Error message must be a string') return", "= error else: raise TypeError('Error message must be a string') return jinja2.Environment( loader=jinja2.FileSystemLoader(path)", "context={}, error=None, path='templates'): if error: # Error should be a string if isinstance(error,", "isinstance(error, str): context['error'] = error else: raise TypeError('Error message must be a string')", "context['error'] = error else: raise TypeError('Error message must be a string') return jinja2.Environment(", "error=None, path='templates'): if error: # Error should be a string if isinstance(error, str):", "error: # Error should be a string if isinstance(error, str): context['error'] = error", "def render(filename, context={}, error=None, path='templates'): if error: # Error should be a string", "if isinstance(error, str): context['error'] = error else: raise TypeError('Error message must be a", "should be a string if isinstance(error, str): context['error'] = error else: raise TypeError('Error", "if error: # Error should be a string if isinstance(error, str): context['error'] =", "Error should be a string if isinstance(error, str): context['error'] = error else: raise", "error else: raise TypeError('Error message must be a string') return jinja2.Environment( loader=jinja2.FileSystemLoader(path) ).get_template(filename).render(context)", "render(filename, context={}, error=None, path='templates'): if error: # Error should be a string if", "a string if isinstance(error, str): context['error'] = error else: raise TypeError('Error message must", "be a string if isinstance(error, str): context['error'] = error else: raise TypeError('Error message", "import jinja2 def render(filename, context={}, error=None, path='templates'): if error: # Error should be", "jinja2 def render(filename, context={}, error=None, path='templates'): if error: # Error should be a", "# Error should be a string if isinstance(error, str): context['error'] = 
error else:", "string if isinstance(error, str): context['error'] = error else: raise TypeError('Error message must be", "path='templates'): if error: # Error should be a string if isinstance(error, str): context['error']" ]
[ "from compas.data import Data class Curve(Data): \"\"\"Base class for all curves in this", "compas.data import Data class Curve(Data): \"\"\"Base class for all curves in this package.\"\"\"" ]
[ "= '' marker = '' if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt = Fore.GREEN elif", "ui_instance.update_log(r + \"\\n\", Fore.RED) def on_getreg(self, args): ui_instance = UI.instance if len(args) !=", "@ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_cog(self, args): ui_instance =", "= WordCompleter(list(self.commands.keys())) self.prompt = TextArea( height=1, prompt=\"p2db > \", multiline=False, wrap_lines=False, complete_while_typing=True, completer=cmd_completer,", "+ args[0] + \"\\n\", Fore.RED) self.dirty = True @kb.add('c-i') def shift_focus(e): e.app.layout.focus_next() def", "(unimplemented) Continue execution. Cog will be disconnected until it interrupts itself reset :", "args = cmd.split(' ') if args[0] in self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command: \"", "self.instruction_window, ]) root_container = HSplit([ body, self.log_window, self.prompt_window ]) layout = Layout(root_container, self.prompt)", "event.app.exit() def on_help(self, args): ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s') def on_step(self,", "'' inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif 'jmp' in", ": Set the active cog to n cogaddr <addr> : Set the cog", "Fore, Style import threading import logging import time import re from . 
import", "Fore.RED) return r = ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0], Fore.RED) else: try: addr =", "= HSplit([ Box(Window(self.function_header, height=1), 1, padding_top=0), Box(Window(self.instructions, height=40), 1) ]) # Frames for", "if call address is 0x200-0x400, convert it to where the LUT function is", "+ \"\\n\", Fore.RED) self.dirty = True @kb.add('c-i') def shift_focus(e): e.app.layout.focus_next() def update_log(self, new_text,", "or 'tj' in sec[i][1] or 'dj' in sec[i][1]: inst = \" {:x}: {}{}{}{}\\n\".format(i,", "@kb.add('c-p') def on_pins(self, args=[]): ui_instance = UI.instance ui_instance.server.update_pins() def on_continue(self, args=[]): ui_instance =", "in range(section_addr, section_addr + 4*(len(sec) - 1), 4): inst = \" {:x}: {}", "if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"long @ {:#02x} -> {:#02x}\".format(int(args[0], 16),", "= self.server.get_status() if (stat): # draw the status dictionary stat_dict = vars(self.server.get_status()) stat_lines", "1), \"Status\") self.instruction_window = Frame(Box(instruction_split, 1), \"Source\") self.log_window = Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window", "\" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif 'jmp' in sec[i][1] or 'tj'", "r else 0 # call_dest = p2tools.get_section(self.obj_data, call_addr) # if call_addr != 0:", "data_str = '' section_addr = sec['section_addr'] for i in range(section_addr, section_addr + 4*(len(sec)", "pin_str = porta_str + '\\n\\n\\n' + portb_str + Fore.RESET + Style.RESET_ALL self.pins.text =", "(self.server.ina >> i) & 1: bit = 'H' else: bit = 'L' if", "= ANSI(Fore.YELLOW + \"Cog Execution Mode. 
Set base address with 'cogaddr' to see", "self.app = Application(full_screen=True) self.current_func = '' self.dirty = True self.render_lock = threading.Lock() #", "self.obj_data[sec]: section = self.obj_data[sec] func_name = sec if cog_mode and stat.exec_mode != 'lutex'", "'L' if not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX portb_str += color", "\"\\n\") def on_getbyte(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected", "\"\\n\", Fore.RED) else: ui_instance.update_log(\"byte @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def", "\"\\n\") @kb.add('c-s') def on_step(self, args=[]): ui_instance = UI.instance r = ui_instance.server.step() if r:", "name. Address should be in hex getbyte <addr> : Get the byte at", "import WordCompleter from prompt_toolkit.data_structures import Point from prompt_toolkit.formatted_text import ANSI from prompt_toolkit.layout.screen import", "the address of a call instruction # r = re.search(pat, sec[i][1]) # call_addr", "from prompt_toolkit.key_binding import KeyBindings from prompt_toolkit.widgets import Frame, TextArea, Box from prompt_toolkit.completion import", "porta_str + '\\n\\n\\n' + portb_str + Fore.RESET + Style.RESET_ALL self.pins.text = ANSI(pin_str) #", "call address is 0x200-0x400, convert it to where the LUT function is stored", "pin states porta_str = '' portb_str = '' for i in range(32): bit", "body, self.log_window, self.prompt_window ]) layout = Layout(root_container, self.prompt) self.app = Application(layout=layout, key_bindings=self.kb, full_screen=True,", "# if call_addr >= 0x200 and call_addr < 0x400: # call_addr = 4*(call_addr", "fmt = Fore.YELLOW else: fmt = Fore.RED if i == self.server.current_cog: fmt +=", "!= 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_byte(args[0]) if (r[0]):", "ui_instance.update_log(r + \"\\n\", Fore.RED) def on_break(self, args): ui_instance 
= UI.instance r = ui_instance.server.breakpoint(args[0])", "else: ui_instance.update_log(\"byte @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_getlong(self, args):", "= int((i - section_addr)/4) else: data_str += ' '*self.pc_cursor_size + inst + Style.RESET_ALL", "0x400: # call_addr = 4*(call_addr - 0x200) + 0x200 # call_dest = p2tools.get_section(self.obj_data,", "[] for k in stat_dict: if k.startswith('_'): pass elif k == 'pc': stat_lines.append(\"{:", "= ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0], Fore.RED) else: try: addr = int(args[0], 16) ui_instance.update_log(\"reg", "# draw the status dictionary stat_dict = vars(self.server.get_status()) stat_lines = [] for k", "in range(8): fmt = '' marker = '' if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt", "WordCompleter(list(self.commands.keys())) self.prompt = TextArea( height=1, prompt=\"p2db > \", multiline=False, wrap_lines=False, complete_while_typing=True, completer=cmd_completer, accept_handler", "dissassembly window # get the function the current PC is in pc =", "pc) self.instructions.text = ANSI(s) self.function_header.text = ANSI(func_name) else: self.status.text = \"*** No connection", "import ANSI from prompt_toolkit.layout.screen import Char from colorama import Fore, Style import threading", "1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0],", "on_reset(self, args): ui_instance = UI.instance ui_instance.update_log('reset unimplemented\\n') def on_quit(self, args): ui_instance = UI.instance", "if k.startswith('_'): pass elif k == 'pc': stat_lines.append(\"{: >30} : {: <#8x}\".format(k, stat_dict[k]))", "with 'cogaddr' to see disassembly\" + Fore.RESET) self.instructions.text = ANSI(\"\") else: s =", "stat_dict = vars(self.server.get_status()) stat_lines = [] for k in stat_dict: if k.startswith('_'): pass", "'X' 
color = Fore.LIGHTBLACK_EX portb_str += color + \"{0: <3}\".format(bit) pin_str = porta_str", "def log_cursor_pos(): y = self.log.text.value.count('\\n') return Point(0, y) self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area", "if 'calla' in sec[i][1]: # pat = r'^(.*?) #\\\\([0-9]+)(.*?)' # pattern to get", "= Window(self.log) # prompt stuff cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt = TextArea( height=1, prompt=\"p2db", "> \", multiline=False, wrap_lines=False, complete_while_typing=True, completer=cmd_completer, accept_handler = self.accept, focus_on_click=True, ) # status", "1 argument\\n\", Fore.RED) return r = ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED)", "Quit ''' pc_cursor_string = Fore.CYAN + \" ---> \" + Fore.RESET pc_cursor_size =", "section. place the cursor string at PTR ''' data_str = '' section_addr =", ": (unimplemented) Continue execution. Cog will be disconnected until it interrupts itself reset", "== self.server.current_cog: fmt += Style.BRIGHT marker = '*' conn_str += fmt + '{:", "argument\\n\", Fore.RED) return r = ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0], Fore.RED) else: try: addr", "ui_instance = UI.instance ui_instance.update_log('reset unimplemented\\n') def on_quit(self, args): ui_instance = UI.instance ui_instance.app.exit() def", "except ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1]) + \"\\n\") def on_getbyte(self, args): ui_instance =", "+ self.pc_cursor_string + inst + Style.RESET_ALL self.pc_line = int((i - section_addr)/4) else: data_str", "do_redraw = False while(1): if (self.server.stat_dirty or self.dirty): self.render_lock.acquire() self.server.stat_dirty = False do_redraw", "self.on_stepin, \"stepout\": self.on_stepout, \"break\": self.on_break, \"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong, \"continue\": 
self.on_continue,", "r[1]) + \"\\n\") except ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1]) + \"\\n\") def on_getbyte(self,", "ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-t') def on_stepin(self, args=[]): ui_instance = UI.instance r =", "' '*self.pc_cursor_size + inst + Style.RESET_ALL return data_str def prerender(self, app): self.render_lock.acquire() def", "ui_instance.app.exit() def accept(self, buff): cmd = self.prompt.text args = cmd.split(' ') if args[0]", "Char from colorama import Fore, Style import threading import logging import time import", "pc = stat.get_mem_pc() cog_mode = stat.exec_mode == \"cogex\" func_name = '' for sec", "prompt_toolkit.layout import FormattedTextControl, WindowAlign from prompt_toolkit.key_binding import KeyBindings from prompt_toolkit.widgets import Frame, TextArea,", "itself reset : (unimplemented) Reload the current program quit [Ctrl+Q] : Quit '''", "sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif 'jmp' in sec[i][1] or 'tj' in sec[i][1] or", "from prompt_toolkit import Application from prompt_toolkit.layout.containers import VSplit, HSplit, Window from prompt_toolkit.layout.layout import", "expected numeric argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self, args=[]): ui_instance = UI.instance", "Window(self.log) # prompt stuff cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt = TextArea( height=1, prompt=\"p2db >", "color = Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.ina", "ANSI(s) self.function_header.text = ANSI(func_name) else: self.status.text = \"*** No connection to cog\" self.function_header.text", "instruction. Call instructions are stepped over. Modifier instructions (augd/s, setq) will be skipped.", "<addr> : Set breakpoint at 'addr' and continue. 
'addr' should be in hex", "LUT function is stored in HUB ram # if call_addr >= 0x200 and", "the current program quit [Ctrl+Q] : Quit ''' pc_cursor_string = Fore.CYAN + \"", "'reg'. 'reg' can be an address or register name. Address should be in", "new_text, color=\"\"): self.log.text = ANSI(self.log.text.value + color + new_text + Fore.RESET) def get_section_str(self,", "<3}\".format(bit) pin_str = porta_str + '\\n\\n\\n' + portb_str + Fore.RESET + Style.RESET_ALL self.pins.text", "get the address of a call instruction # r = re.search(pat, sec[i][1]) #", "= True self.render_lock = threading.Lock() # dict of commands and handler function for", "range(8): fmt = '' marker = '' if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt =", "WordCompleter from prompt_toolkit.data_structures import Point from prompt_toolkit.formatted_text import ANSI from prompt_toolkit.layout.screen import Char", "getreg <reg> : Get the value in 'reg'. 'reg' can be an address", "and continue. 'addr' should be in hex getreg <reg> : Get the value", "= server self.obj_data = objdata self.app = Application(full_screen=True) self.current_func = '' self.dirty =", "---- help : Print this dialog step [Ctrl+S] : Step by one instruction.", "[Ctrl+T] : Step into a function call stepout [Ctrl+O] : Step out of", "self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX portb_str += color + \"{0: <3}\".format(bit)", "self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area = Window(self.log) # prompt stuff cmd_completer = WordCompleter(list(self.commands.keys()))", "'{: >10}'.format('{} Cog {}\\n'.format(marker, i)) + Style.RESET_ALL + Fore.RESET self.connection.text = ANSI(conn_str) #", "expected 1 argument\\n\", Fore.RED) return try: addr = int(args[0], 16) except ValueError: ui_instance.update_log(\"Error:", "address of a call instruction # r = re.search(pat, sec[i][1]) # call_addr =", "'' if (self.server.dira >> i) & 1: color = Fore.RED + 
Style.BRIGHT else:", "self.pc_cursor_string + inst + Style.RESET_ALL self.pc_line = int((i - section_addr)/4) else: data_str +=", "where the LUT function is stored in HUB ram # if call_addr >=", "Print this dialog step [Ctrl+S] : Step by one instruction. Call instructions are", "Continue execution. Cog will be disconnected until it interrupts itself reset : (unimplemented)", "= sec['section_addr'] for i in range(section_addr, section_addr + 4*(len(sec) - 1), 4): inst", "'reg' can be an address or register name. Address should be in hex", "re from . import p2tools from . import p2db_server log = logging.getLogger('main') Char.display_mappings['\\t']", "1 argument\\n\", Fore.RED) return try: addr = int(args[0], 16) except ValueError: ui_instance.update_log(\"Error: expected", "conn_str += fmt + '{: >10}'.format('{} Cog {}\\n'.format(marker, i)) + Style.RESET_ALL + Fore.RESET", "{:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif 'jmp' in sec[i][1] or 'tj' in", "def update_log(self, new_text, color=\"\"): self.log.text = ANSI(self.log.text.value + color + new_text + Fore.RESET)", "full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def exit_(event): event.app.exit() def on_help(self, args): ui_instance", "[Ctrl+O] : Step out of the current function call break <addr> : Set", "prompt_toolkit.layout.screen import Char from colorama import Fore, Style import threading import logging import", "conn_str = '' for i in range(8): fmt = '' marker = ''", "self.function_header.text = ANSI(func_name) else: self.status.text = \"*** No connection to cog\" self.function_header.text =", "self.log.text.value.count('\\n') return Point(0, y) self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area = Window(self.log) # prompt", "{}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif 'jmp' in 
sec[i][1] or 'tj' in sec[i][1]", "convert it to where the LUT function is stored in HUB ram #", "self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command: \" + args[0] + \"\\n\", Fore.RED) self.dirty = True", "at 'addr' and continue. 'addr' should be in hex getreg <reg> : Get", "prompt stuff cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt = TextArea( height=1, prompt=\"p2db > \", multiline=False,", "sec[i][1], Fore.RESET) elif 'jmp' in sec[i][1] or 'tj' in sec[i][1] or 'dj' in", "should be in hex getbyte <addr> : Get the byte at hub address", "be skipped. stepin [Ctrl+T] : Step into a function call stepout [Ctrl+O] :", "<addr> : Get the byte at hub address 'addr'. Address should be in", "0x200-0x400, convert it to where the LUT function is stored in HUB ram", "data_str += Style.BRIGHT + self.pc_cursor_string + inst + Style.RESET_ALL self.pc_line = int((i -", "WindowAlign from prompt_toolkit.key_binding import KeyBindings from prompt_toolkit.widgets import Frame, TextArea, Box from prompt_toolkit.completion", "at PTR ''' data_str = '' section_addr = sec['section_addr'] for i in range(section_addr,", "\"Source\") self.log_window = Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window = Frame(self.prompt) body = VSplit([ self.cog_status_window,", "ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s') def on_step(self, args=[]): ui_instance = UI.instance", "int(args[0], 16) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def", "= r'^(.*?) 
#\\\\([0-9]+)(.*?)' # pattern to get the address of a call instruction", "'' marker = '' if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt = Fore.GREEN elif self.server.cog_states[i].get_state()", "+ inst + Style.RESET_ALL self.pc_line = int((i - section_addr)/4) else: data_str += '", "import re from . import p2tools from . import p2db_server log = logging.getLogger('main')", "= UI.instance ui_instance.server.continue_exec() def on_reset(self, args): ui_instance = UI.instance ui_instance.update_log('reset unimplemented\\n') def on_quit(self,", "# else: # call_dest = '' inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX,", "Set breakpoint at 'addr' and continue. 'addr' should be in hex getreg <reg>", "\" {:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1]) if i == ptr: data_str += Style.BRIGHT +", "each section self.cog_status_window = Frame(Box(status_split, 1), \"Status\") self.instruction_window = Frame(Box(instruction_split, 1), \"Source\") self.log_window", "+ Style.RESET_ALL self.pc_line = int((i - section_addr)/4) else: data_str += ' '*self.pc_cursor_size +", "ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr, r[1]) + \"\\n\") except ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1])", "address or register name. 
Address should be in hex getbyte <addr> : Get", "ui_instance.update_log(\"byte @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_getlong(self, args): ui_instance", "'X' color = Fore.LIGHTBLACK_EX porta_str += color + \"{0: <3}\".format(bit) if (self.server.dirb >>", "= '\\n'.join(stat_lines) self.status.text = stat_text # draw cog connections status's conn_str = ''", "prompt_toolkit.widgets import Frame, TextArea, Box from prompt_toolkit.completion import WordCompleter from prompt_toolkit.data_structures import Point", "if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"byte @ {:#02x} -> {:#02x}\".format(int(args[0], 16),", "after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def exit_(event): event.app.exit() def on_help(self, args): ui_instance = UI.instance", "in sec[i][1]: # pat = r'^(.*?) #\\\\([0-9]+)(.*?)' # pattern to get the address", "is 0x200-0x400, convert it to where the LUT function is stored in HUB", ": Get the long at hub address 'addr'. Address should be in hex", "import p2tools from . 
import p2db_server log = logging.getLogger('main') Char.display_mappings['\\t'] = '\\t' class", "{:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_cog(self, args): ui_instance = UI.instance if len(args)", "on_break(self, args): ui_instance = UI.instance r = ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r + \"\\n\",", "ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self, args=[]): ui_instance", "section self.cog_status_window = Frame(Box(status_split, 1), \"Status\") self.instruction_window = Frame(Box(instruction_split, 1), \"Source\") self.log_window =", "function call stepout [Ctrl+O] : Step out of the current function call break", "expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0], Fore.RED) else:", "r = ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"long @ {:#02x}", "+ '{: >10}'.format('{} Cog {}\\n'.format(marker, i)) + Style.RESET_ALL + Fore.RESET self.connection.text = ANSI(conn_str)", "return try: addr = int(args[0], 16) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED)", "self.status = FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI('')) status_split = HSplit([ VSplit([", "'addr' and continue. 'addr' should be in hex getreg <reg> : Get the", "the byte at hub address 'addr'. 
Address should be in hex getlong <addr>", "ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: cog_num = int(args[0]) except ValueError: ui_instance.update_log(\"Error:", "prompt_toolkit.data_structures import Point from prompt_toolkit.formatted_text import ANSI from prompt_toolkit.layout.screen import Char from colorama", "prompt_toolkit.key_binding import KeyBindings from prompt_toolkit.widgets import Frame, TextArea, Box from prompt_toolkit.completion import WordCompleter", "in sec[i][1] or 'dj' in sec[i][1]: inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN,", "ANSI(conn_str) # draw the pin states porta_str = '' portb_str = '' for", "self.instructions = FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI('')) instruction_split = HSplit([ Box(Window(self.function_header, height=1),", "if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_long(args[0])", "\"pins\": self.on_pins, \"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset, \"quit\": self.on_quit, \"help\": self.on_help }", "= int(args[0], 16) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p')", "else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.inb >> i) & 1: bit", "< 0x400: # call_addr = 4*(call_addr - 0x200) + 0x200 # call_dest =", "= int(args[0]) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num) def on_cogaddr(self,", "int(args[0]) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args):", "'' self.dirty = True self.render_lock = 
threading.Lock() # dict of commands and handler", "try: addr = int(args[0], 16) ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr, r[1]) + \"\\n\") except", "def prerender(self, app): self.render_lock.acquire() def postrender(self, app): self.render_lock.release() def data_updater(self): do_redraw = False", "+ \"\\n\", Fore.RED) def on_break(self, args): ui_instance = UI.instance r = ui_instance.server.breakpoint(args[0]) if", "self.prompt.text args = cmd.split(' ') if args[0] in self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command:", "= logging.getLogger('main') Char.display_mappings['\\t'] = '\\t' class UI: kb = KeyBindings() help_text = '''", "argument\\n\", Fore.RED) return r = ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else:", "= '' if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt = Fore.GREEN elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING:", "- 1), 4): inst = \" {:x}: {} {}\\n\".format(i, sec[i][0], sec[i][1]) if 'call'", "Step by one instruction. Call instructions are stepped over. Modifier instructions (augd/s, setq)", "= UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try:", "ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1]) + \"\\n\") def on_getbyte(self, args): ui_instance = UI.instance", "if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_getreg(self, args): ui_instance = UI.instance if", "-1: self.function_header.text = ANSI(Fore.YELLOW + \"Cog Execution Mode. Set base address with 'cogaddr'", "r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_break(self, args): ui_instance = UI.instance r =", "app): self.render_lock.acquire() def postrender(self, app): self.render_lock.release() def data_updater(self): do_redraw = False while(1): if", "Modifier instructions (augd/s, setq) will be skipped. 
stepin [Ctrl+T] : Step into a", "getbyte <addr> : Get the byte at hub address 'addr'. Address should be", "r'^(.*?) #\\\\([0-9]+)(.*?)' # pattern to get the address of a call instruction #", "Reload the current program quit [Ctrl+Q] : Quit ''' pc_cursor_string = Fore.CYAN +", "+ Fore.RESET pc_cursor_size = 10 instance = None def __init__(self, server: p2db_server.P2DBServer, objdata):", "args[0] + \"\\n\", Fore.RED) self.dirty = True @kb.add('c-i') def shift_focus(e): e.app.layout.focus_next() def update_log(self,", "Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif 'jmp' in sec[i][1] or 'tj' in sec[i][1] or 'dj'", "\"*** No connection to cog\" self.function_header.text = ANSI(\"*** No connection to cog\") #", "= \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET) else: inst = \" {:x}:", "marker = '' if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt = Fore.GREEN elif self.server.cog_states[i].get_state() ==", "command: \" + args[0] + \"\\n\", Fore.RED) self.dirty = True @kb.add('c-i') def shift_focus(e):", "= FormattedTextControl(ANSI('')) instruction_split = HSplit([ Box(Window(self.function_header, height=1), 1, padding_top=0), Box(Window(self.instructions, height=40), 1) ])", "Call instructions are stepped over. Modifier instructions (augd/s, setq) will be skipped. stepin", "# instruction window stuff def inst_cursor_pos(): y = max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0,", "+ Style.BRIGHT if (self.server.inb >> i) & 1: bit = 'H' else: bit", "self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt = Fore.GREEN elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW", "bit = 'L' if not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX portb_str", "== \"cogex\" func_name = '' for sec in self.obj_data: if pc in self.obj_data[sec]:", "logging import time import re from . import p2tools from . 
import p2db_server", "None def __init__(self, server: p2db_server.P2DBServer, objdata): assert(not UI.instance) UI.instance = self self.server =", "a function call stepout [Ctrl+O] : Step out of the current function call", "= ANSI(s) self.function_header.text = ANSI(func_name) else: self.status.text = \"*** No connection to cog\"", "draw the pin states porta_str = '' portb_str = '' for i in", ": {: <#8x}\".format(k, stat_dict[k])) else: stat_lines.append(\"{: >30} : {!s: <8}\".format(k, stat_dict[k])) stat_text =", "p2tools from . import p2db_server log = logging.getLogger('main') Char.display_mappings['\\t'] = '\\t' class UI:", "Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.inb >> i) & 1:", "UI.instance ui_instance.app.exit() def accept(self, buff): cmd = self.prompt.text args = cmd.split(' ') if", "+ Style.BRIGHT if (self.server.ina >> i) & 1: bit = 'H' else: bit", "& 1: color = Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT", "stuff self.status = FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI('')) status_split = HSplit([", "(self.server.dirb >> i) & 1: color = Fore.RED + Style.BRIGHT else: color =", "1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0]", "p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW else: fmt = Fore.RED if i == self.server.current_cog: fmt", "+ \"{0: <3}\".format(bit) pin_str = porta_str + '\\n\\n\\n' + portb_str + Fore.RESET +", "# get the log data while not self.server.log_queue.empty(): c = self.server.log_queue.get() if c", "]) layout = Layout(root_container, self.prompt) self.app = Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window)", "padding_top=0), Box(Window(self.instructions, height=40), 1) 
]) # Frames for each section self.cog_status_window = Frame(Box(status_split,", "''' return a atring for all instructions in a given section. place the", "args): ui_instance = UI.instance ui_instance.app.exit() def accept(self, buff): cmd = self.prompt.text args =", "= Frame(Box(instruction_split, 1), \"Source\") self.log_window = Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window = Frame(self.prompt) body", "return r = ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"long @", "instructions (augd/s, setq) will be skipped. stepin [Ctrl+T] : Step into a function", "self.instructions.text = ANSI(s) self.function_header.text = ANSI(func_name) else: self.status.text = \"*** No connection to", "section_addr = sec['section_addr'] for i in range(section_addr, section_addr + 4*(len(sec) - 1), 4):", "n cogaddr <addr> : Set the cog execution address (for native cogs) continue", "in hex getlong <addr> : Get the long at hub address 'addr'. Address", "call_addr >= 0x200 and call_addr < 0x400: # call_addr = 4*(call_addr - 0x200)", "step [Ctrl+S] : Step by one instruction. Call instructions are stepped over. Modifier", "address with 'cogaddr' to see disassembly\" + Fore.RESET) self.instructions.text = ANSI(\"\") else: s", "= ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"byte @ {:#02x} ->", "{} {}\\n\".format(i, sec[i][0], sec[i][1]) if 'call' in sec[i][1]: # if 'calla' in sec[i][1]:", "= \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif 'jmp' in sec[i][1] or", "\"reset\": self.on_reset, \"quit\": self.on_quit, \"help\": self.on_help } # log stuff def log_cursor_pos(): y", "return data_str def prerender(self, app): self.render_lock.acquire() def postrender(self, app): self.render_lock.release() def data_updater(self): do_redraw", "Mode. 
Set base address with 'cogaddr' to see disassembly\" + Fore.RESET) self.instructions.text =", "Address should be in hex getbyte <addr> : Get the byte at hub", "argument\\n\", Fore.RED) return r = ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else:", "get the function the current PC is in pc = stat.get_mem_pc() cog_mode =", "-> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_cog(self, args): ui_instance = UI.instance if", "colorama import Fore, Style import threading import logging import time import re from", "r[1]) + \"\\n\") def on_getbyte(self, args): ui_instance = UI.instance if len(args) != 1:", "Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.inb >> i) & 1: bit = 'H' else:", "self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW else: fmt = Fore.RED if i ==", "cog <n> : Set the active cog to n cogaddr <addr> : Set", "!= 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_reg(args[0]) if (r[0]):", "\"\\n\") def on_cog(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected", "= ANSI(conn_str) # draw the pin states porta_str = '' portb_str = ''", "from . import p2db_server log = logging.getLogger('main') Char.display_mappings['\\t'] = '\\t' class UI: kb", "sec[i][1]: # pat = r'^(.*?) #\\\\([0-9]+)(.*?)' # pattern to get the address of", "self.instructions.text = ANSI(\"\") else: s = self.get_section_str(section, pc) self.instructions.text = ANSI(s) self.function_header.text =", "{}{}\\n\".format(i, sec[i][0], sec[i][1]) if i == ptr: data_str += Style.BRIGHT + self.pc_cursor_string +", "completer=cmd_completer, accept_handler = self.accept, focus_on_click=True, ) # status window stuff self.status = FormattedTextControl(ANSI(''))", "register name. 
Address should be in hex getbyte <addr> : Get the byte", "1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0]", "16), r[1]) + \"\\n\") def on_getlong(self, args): ui_instance = UI.instance if len(args) !=", "def on_continue(self, args=[]): ui_instance = UI.instance ui_instance.server.continue_exec() def on_reset(self, args): ui_instance = UI.instance", "import Char from colorama import Fore, Style import threading import logging import time", "or self.dirty): self.render_lock.acquire() self.server.stat_dirty = False do_redraw = True stat = self.server.get_status() if", "or register name. Address should be in hex getbyte <addr> : Get the", "inst + Style.RESET_ALL return data_str def prerender(self, app): self.render_lock.acquire() def postrender(self, app): self.render_lock.release()", "argument\\n\", Fore.RED) return try: cog_num = int(args[0]) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\",", "connection to cog\" self.function_header.text = ANSI(\"*** No connection to cog\") # get the", "from prompt_toolkit.data_structures import Point from prompt_toolkit.formatted_text import ANSI from prompt_toolkit.layout.screen import Char from", "= False while(1): if (self.server.stat_dirty or self.dirty): self.render_lock.acquire() self.server.stat_dirty = False do_redraw =", "Fore.RESET) self.instructions.text = ANSI(\"\") else: s = self.get_section_str(section, pc) self.instructions.text = ANSI(s) self.function_header.text", "Fore.CYAN + \" ---> \" + Fore.RESET pc_cursor_size = 10 instance = None", ": Step into a function call stepout [Ctrl+O] : Step out of the", "self.server = server self.obj_data = objdata self.app = Application(full_screen=True) self.current_func = '' self.dirty", ": Get the byte at hub address 'addr'. 
Address should be in hex", "out of the current function call break <addr> : Set breakpoint at 'addr'", "status_split = HSplit([ VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0) ]), Frame(Box(Window(self.pins, width=95, height=5),", "padding_top=0) ]), Frame(Box(Window(self.pins, width=95, height=5), padding=3, padding_bottom=0, padding_top=1), \"Pins\") ]) # instruction window", "\" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET) else: inst = \" {:x}: {}{}\\n\".format(i,", "given section. place the cursor string at PTR ''' data_str = '' section_addr", "kb = KeyBindings() help_text = ''' p2db ---- help : Print this dialog", "if c != '\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release() self.app.invalidate() do_redraw = False", "FormattedTextControl, WindowAlign from prompt_toolkit.key_binding import KeyBindings from prompt_toolkit.widgets import Frame, TextArea, Box from", "\" + Fore.RESET pc_cursor_size = 10 instance = None def __init__(self, server: p2db_server.P2DBServer,", "do_redraw: self.render_lock.release() self.app.invalidate() do_redraw = False time.sleep(0.02) def run(self): t = threading.Thread(target=self.data_updater, daemon=True)", "new_text + Fore.RESET) def get_section_str(self, sec, ptr): ''' return a atring for all", "if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_reg(args[0])", "args=[]): ui_instance = UI.instance ui_instance.server.update_pins() def on_continue(self, args=[]): ui_instance = UI.instance ui_instance.server.continue_exec() def", "self.commands = { \"step\": self.on_step, \"stepin\": self.on_stepin, \"stepout\": self.on_stepout, \"break\": self.on_break, \"getreg\": self.on_getreg,", "= int(args[0], 16) ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr, r[1]) + \"\\n\") except ValueError: ui_instance.update_log(\"{}", "connections 
status's conn_str = '' for i in range(8): fmt = '' marker", "Fore.RED) return r = ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"long", "window stuff def inst_cursor_pos(): y = max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0, y) self.pc_line", "Execution Mode. Set base address with 'cogaddr' to see disassembly\" + Fore.RESET) self.instructions.text", "ANSI(\"\") else: s = self.get_section_str(section, pc) self.instructions.text = ANSI(s) self.function_header.text = ANSI(func_name) else:", "\"{0: <3}\".format(bit) if (self.server.dirb >> i) & 1: color = Fore.RED + Style.BRIGHT", "call_addr != 0: # # if call address is 0x200-0x400, convert it to", "{}\\n'.format(marker, i)) + Style.RESET_ALL + Fore.RESET self.connection.text = ANSI(conn_str) # draw the pin", "i in range(32): bit = '' if (self.server.dira >> i) & 1: color", "+ Style.RESET_ALL + Fore.RESET self.connection.text = ANSI(conn_str) # draw the pin states porta_str", "Char.display_mappings['\\t'] = '\\t' class UI: kb = KeyBindings() help_text = ''' p2db ----", "= ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"long @ {:#02x} ->", "pc_cursor_string = Fore.CYAN + \" ---> \" + Fore.RESET pc_cursor_size = 10 instance", "Fore.RESET) def get_section_str(self, sec, ptr): ''' return a atring for all instructions in", "HSplit, Window from prompt_toolkit.layout.layout import Layout from prompt_toolkit.layout import FormattedTextControl, WindowAlign from prompt_toolkit.key_binding", "\"quit\": self.on_quit, \"help\": self.on_help } # log stuff def log_cursor_pos(): y = self.log.text.value.count('\\n')", "unimplemented\\n') def on_quit(self, args): ui_instance = UI.instance ui_instance.app.exit() def accept(self, buff): cmd =", "= '*' conn_str += fmt + '{: >10}'.format('{} Cog {}\\n'.format(marker, i)) + Style.RESET_ALL", "0: # 
# if call address is 0x200-0x400, convert it to where the", "<reg> : Get the value in 'reg'. 'reg' can be an address or", "Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0) ]), Frame(Box(Window(self.pins, width=95, height=5), padding=3, padding_bottom=0, padding_top=1), \"Pins\") ])", "return Point(0, y) self.pc_line = 0 self.instructions = FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header =", "pins : Update pin status data cog <n> : Set the active cog", "0x200 # call_dest = p2tools.get_section(self.obj_data, call_addr) # else: # call_dest = '' inst", "def postrender(self, app): self.render_lock.release() def data_updater(self): do_redraw = False while(1): if (self.server.stat_dirty or", "self.server.log_queue.get() if c != '\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release() self.app.invalidate() do_redraw =", "be disconnected until it interrupts itself reset : (unimplemented) Reload the current program", "]) root_container = HSplit([ body, self.log_window, self.prompt_window ]) layout = Layout(root_container, self.prompt) self.app", "\"stepout\": self.on_stepout, \"break\": self.on_break, \"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong, \"continue\": self.on_continue, \"pins\":", "self.render_lock.acquire() def postrender(self, app): self.render_lock.release() def data_updater(self): do_redraw = False while(1): if (self.server.stat_dirty", "ptr: data_str += Style.BRIGHT + self.pc_cursor_string + inst + Style.RESET_ALL self.pc_line = int((i", "= '\\t' class UI: kb = KeyBindings() help_text = ''' p2db ---- help", "= Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.ina >> i) & 1: bit = 'H'", "# r = re.search(pat, sec[i][1]) # call_addr = int(r.group(2)) if r else 0", "the log data while not self.server.log_queue.empty(): c = self.server.log_queue.get() if c != '\\r':", "@kb.add('c-c') @kb.add('c-q') def exit_(event): 
event.app.exit() def on_help(self, args): ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text +", "args=[]): ui_instance = UI.instance r = ui_instance.server.stepin() if r: ui_instance.update_log(r + \"\\n\", Fore.RED)", "ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"byte @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) +", ">30} : {: <#8x}\".format(k, stat_dict[k])) else: stat_lines.append(\"{: >30} : {!s: <8}\".format(k, stat_dict[k])) stat_text", "self.server.current_cog: fmt += Style.BRIGHT marker = '*' conn_str += fmt + '{: >10}'.format('{}", "\"\\n\", Fore.RED) self.dirty = True @kb.add('c-i') def shift_focus(e): e.app.layout.focus_next() def update_log(self, new_text, color=\"\"):", "cog_mode = stat.exec_mode == \"cogex\" func_name = '' for sec in self.obj_data: if", "skipped. stepin [Ctrl+T] : Step into a function call stepout [Ctrl+O] : Step", "# status window stuff self.status = FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI(''))", "else: fmt = Fore.RED if i == self.server.current_cog: fmt += Style.BRIGHT marker =", "import p2db_server log = logging.getLogger('main') Char.display_mappings['\\t'] = '\\t' class UI: kb = KeyBindings()", "portb_str = '' for i in range(32): bit = '' if (self.server.dira >>", "stat_text # draw cog connections status's conn_str = '' for i in range(8):", "and handler function for each self.commands = { \"step\": self.on_step, \"stepin\": self.on_stepin, \"stepout\":", "= '' for sec in self.obj_data: if pc in self.obj_data[sec]: section = self.obj_data[sec]", "if do_redraw: self.render_lock.release() self.app.invalidate() do_redraw = False time.sleep(0.02) def run(self): t = threading.Thread(target=self.data_updater,", "in a given section. 
place the cursor string at PTR ''' data_str =", "bit = 'L' if not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX porta_str", "for each section self.cog_status_window = Frame(Box(status_split, 1), \"Status\") self.instruction_window = Frame(Box(instruction_split, 1), \"Source\")", "wrap_lines=False, complete_while_typing=True, completer=cmd_completer, accept_handler = self.accept, focus_on_click=True, ) # status window stuff self.status", "ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-o') def on_stepout(self, args=[]): ui_instance = UI.instance r =", "vars(self.server.get_status()) stat_lines = [] for k in stat_dict: if k.startswith('_'): pass elif k", "UI.instance ui_instance.update_log('reset unimplemented\\n') def on_quit(self, args): ui_instance = UI.instance ui_instance.app.exit() def accept(self, buff):", "def on_quit(self, args): ui_instance = UI.instance ui_instance.app.exit() def accept(self, buff): cmd = self.prompt.text", "cursor string at PTR ''' data_str = '' section_addr = sec['section_addr'] for i", "i == ptr: data_str += Style.BRIGHT + self.pc_cursor_string + inst + Style.RESET_ALL self.pc_line", "ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return", "False do_redraw = True stat = self.server.get_status() if (stat): # draw the status", "+ inst + Style.RESET_ALL return data_str def prerender(self, app): self.render_lock.acquire() def postrender(self, app):", "\"\\n\") def on_getlong(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected", "stat_lines.append(\"{: >30} : {!s: <8}\".format(k, stat_dict[k])) stat_text = '\\n'.join(stat_lines) self.status.text = stat_text #", "self.on_getbyte, \"getlong\": self.on_getlong, \"continue\": self.on_continue, \"pins\": self.on_pins, \"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset,", "be in hex getreg <reg> : Get the value in 'reg'. 
'reg' can", "i == self.server.current_cog: fmt += Style.BRIGHT marker = '*' conn_str += fmt +", "color + new_text + Fore.RESET) def get_section_str(self, sec, ptr): ''' return a atring", "self.on_stepout, \"break\": self.on_break, \"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong, \"continue\": self.on_continue, \"pins\": self.on_pins,", "from colorama import Fore, Style import threading import logging import time import re", "self.function_header = FormattedTextControl(ANSI('')) instruction_split = HSplit([ Box(Window(self.function_header, height=1), 1, padding_top=0), Box(Window(self.instructions, height=40), 1)", "and call_addr < 0x400: # call_addr = 4*(call_addr - 0x200) + 0x200 #", ": (unimplemented) Reload the current program quit [Ctrl+Q] : Quit ''' pc_cursor_string =", "self.server.stat_dirty = False do_redraw = True stat = self.server.get_status() if (stat): # draw", "UI.instance r = ui_instance.server.stepout() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_break(self, args):", "ui_instance.server.continue_exec() def on_reset(self, args): ui_instance = UI.instance ui_instance.update_log('reset unimplemented\\n') def on_quit(self, args): ui_instance", "def on_cogaddr(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1", ">= 0x200 and call_addr < 0x400: # call_addr = 4*(call_addr - 0x200) +", "\"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset, \"quit\": self.on_quit, \"help\": self.on_help } # log stuff def", "<gh_stars>0 from prompt_toolkit import Application from prompt_toolkit.layout.containers import VSplit, HSplit, Window from prompt_toolkit.layout.layout", "# if 'calla' in sec[i][1]: # pat = r'^(.*?) #\\\\([0-9]+)(.*?)' # pattern to", "break <addr> : Set breakpoint at 'addr' and continue. 
'addr' should be in", "= FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI('')) instruction_split = HSplit([ Box(Window(self.function_header, height=1), 1,", "# Frames for each section self.cog_status_window = Frame(Box(status_split, 1), \"Status\") self.instruction_window = Frame(Box(instruction_split,", "continue : (unimplemented) Continue execution. Cog will be disconnected until it interrupts itself", "= ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_getreg(self, args): ui_instance =", "if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: addr =", "Style.RESET_ALL + Fore.RESET self.connection.text = ANSI(conn_str) # draw the pin states porta_str =", "{:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_getlong(self, args): ui_instance = UI.instance if len(args)", "call_addr < 0x400: # call_addr = 4*(call_addr - 0x200) + 0x200 # call_dest", "'tj' in sec[i][1] or 'dj' in sec[i][1]: inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0],", "!= '\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release() self.app.invalidate() do_redraw = False time.sleep(0.02) def", "Fore.RED) return ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args): ui_instance = UI.instance if len(args) != 1:", "UI.instance) UI.instance = self self.server = server self.obj_data = objdata self.app = Application(full_screen=True)", "VSplit, HSplit, Window from prompt_toolkit.layout.layout import Layout from prompt_toolkit.layout import FormattedTextControl, WindowAlign from", "ui_instance = UI.instance ui_instance.server.continue_exec() def on_reset(self, args): ui_instance = UI.instance ui_instance.update_log('reset unimplemented\\n') def", ">30} : {!s: <8}\".format(k, stat_dict[k])) stat_text = '\\n'.join(stat_lines) self.status.text = stat_text # draw", "+ \"\\n\") except 
ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1]) + \"\\n\") def on_getbyte(self, args):", "objdata): assert(not UI.instance) UI.instance = self self.server = server self.obj_data = objdata self.app", "def on_stepin(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepin() if r: ui_instance.update_log(r +", "+ Style.RESET_ALL self.pins.text = ANSI(pin_str) # update the dissassembly window # get the", "from prompt_toolkit.layout.screen import Char from colorama import Fore, Style import threading import logging", "quit [Ctrl+Q] : Quit ''' pc_cursor_string = Fore.CYAN + \" ---> \" +", "= Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.inb >> i) & 1: bit = 'H'", "argument\\n\", Fore.RED) return try: addr = int(args[0], 16) except ValueError: ui_instance.update_log(\"Error: expected numeric", "p2db ---- help : Print this dialog step [Ctrl+S] : Step by one", "= { \"step\": self.on_step, \"stepin\": self.on_stepin, \"stepout\": self.on_stepout, \"break\": self.on_break, \"getreg\": self.on_getreg, \"getbyte\":", "= [] for k in stat_dict: if k.startswith('_'): pass elif k == 'pc':", "]) # instruction window stuff def inst_cursor_pos(): y = max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return", "!= 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: cog_num = int(args[0]) except", "Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def exit_(event): event.app.exit() def on_help(self,", "status's conn_str = '' for i in range(8): fmt = '' marker =", "pc in self.obj_data[sec]: section = self.obj_data[sec] func_name = sec if cog_mode and stat.exec_mode", "cog connections status's conn_str = '' for i in range(8): fmt = ''", "= porta_str + '\\n\\n\\n' + portb_str + Fore.RESET + Style.RESET_ALL self.pins.text = ANSI(pin_str)", 
"self.connection = FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI('')) status_split = HSplit([ VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT),", "server: p2db_server.P2DBServer, objdata): assert(not UI.instance) UI.instance = self self.server = server self.obj_data =", "{:#02x}\".format(addr, r[1]) + \"\\n\") except ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1]) + \"\\n\") def", "place the cursor string at PTR ''' data_str = '' section_addr = sec['section_addr']", "try: addr = int(args[0], 16) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return", "16) ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr, r[1]) + \"\\n\") except ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0],", "= self.server.log_queue.get() if c != '\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release() self.app.invalidate() do_redraw", "section_addr + 4*(len(sec) - 1), 4): inst = \" {:x}: {} {}\\n\".format(i, sec[i][0],", "dictionary stat_dict = vars(self.server.get_status()) stat_lines = [] for k in stat_dict: if k.startswith('_'):", "self.prompt_window = Frame(self.prompt) body = VSplit([ self.cog_status_window, self.instruction_window, ]) root_container = HSplit([ body,", "Fore.RED) else: ui_instance.update_log(\"byte @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_getlong(self,", "# call_addr = 4*(call_addr - 0x200) + 0x200 # call_dest = p2tools.get_section(self.obj_data, call_addr)", "else: s = self.get_section_str(section, pc) self.instructions.text = ANSI(s) self.function_header.text = ANSI(func_name) else: self.status.text", "@kb.add('c-o') def on_stepout(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepout() if r: ui_instance.update_log(r", "postrender(self, app): self.render_lock.release() def data_updater(self): do_redraw = False while(1): if 
(self.server.stat_dirty or self.dirty):", "'addr' should be in hex getreg <reg> : Get the value in 'reg'.", ">> i) & 1: color = Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX", "{:#02x} -> {:#02x}\".format(addr, r[1]) + \"\\n\") except ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1]) +", "args=[]): ui_instance = UI.instance r = ui_instance.server.step() if r: ui_instance.update_log(r + \"\\n\", Fore.RED)", "UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r =", "breakpoint at 'addr' and continue. 'addr' should be in hex getreg <reg> :", "Fore.RESET + Style.RESET_ALL self.pins.text = ANSI(pin_str) # update the dissassembly window # get", "stuff cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt = TextArea( height=1, prompt=\"p2db > \", multiline=False, wrap_lines=False,", "do_redraw = True stat = self.server.get_status() if (stat): # draw the status dictionary", "Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window = Frame(self.prompt) body = VSplit([ self.cog_status_window, self.instruction_window, ]) root_container", "self.on_step, \"stepin\": self.on_stepin, \"stepout\": self.on_stepout, \"break\": self.on_break, \"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong,", "ui_instance.server.update_pins() def on_continue(self, args=[]): ui_instance = UI.instance ui_instance.server.continue_exec() def on_reset(self, args): ui_instance =", "cog to n cogaddr <addr> : Set the cog execution address (for native", "get_section_str(self, sec, ptr): ''' return a atring for all instructions in a given", "\"\\n\") except ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1]) + \"\\n\") def on_getbyte(self, args): ui_instance", "!= 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_long(args[0]) if (r[0]):", "= self.prompt.text args = 
cmd.split(' ') if args[0] in self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown", "call_addr) # else: # call_dest = '' inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0],", "+ Fore.RESET self.connection.text = ANSI(conn_str) # draw the pin states porta_str = ''", "self.obj_data = objdata self.app = Application(full_screen=True) self.current_func = '' self.dirty = True self.render_lock", "# draw cog connections status's conn_str = '' for i in range(8): fmt", "(for native cogs) continue : (unimplemented) Continue execution. Cog will be disconnected until", "self.render_lock.acquire() self.server.stat_dirty = False do_redraw = True stat = self.server.get_status() if (stat): #", "1) ]) # Frames for each section self.cog_status_window = Frame(Box(status_split, 1), \"Status\") self.instruction_window", "= ANSI(\"\") else: s = self.get_section_str(section, pc) self.instructions.text = ANSI(s) self.function_header.text = ANSI(func_name)", "Style.RESET_ALL return data_str def prerender(self, app): self.render_lock.acquire() def postrender(self, app): self.render_lock.release() def data_updater(self):", "logging.getLogger('main') Char.display_mappings['\\t'] = '\\t' class UI: kb = KeyBindings() help_text = ''' p2db", "Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release() self.app.invalidate() do_redraw = False time.sleep(0.02) def run(self): t =", "in stat_dict: if k.startswith('_'): pass elif k == 'pc': stat_lines.append(\"{: >30} : {:", "on_getlong(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\",", "ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self, args=[]): ui_instance = UI.instance ui_instance.server.update_pins() def on_continue(self, args=[]): ui_instance", "import Fore, Style import threading import logging import time import re from .", "self.connection.text = ANSI(conn_str) # draw the pin states 
porta_str = '' portb_str =", "sec[i][0], sec[i][1]) if 'call' in sec[i][1]: # if 'calla' in sec[i][1]: # pat", "0x200) + 0x200 # call_dest = p2tools.get_section(self.obj_data, call_addr) # else: # call_dest =", "def on_reset(self, args): ui_instance = UI.instance ui_instance.update_log('reset unimplemented\\n') def on_quit(self, args): ui_instance =", "call_dest = p2tools.get_section(self.obj_data, call_addr) # if call_addr != 0: # # if call", "= threading.Lock() # dict of commands and handler function for each self.commands =", "window # get the function the current PC is in pc = stat.get_mem_pc()", "HSplit([ body, self.log_window, self.prompt_window ]) layout = Layout(root_container, self.prompt) self.app = Application(layout=layout, key_bindings=self.kb,", "= FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI('')) status_split = HSplit([ VSplit([ Window(self.status),", "def on_getbyte(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1", "or 'dj' in sec[i][1]: inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET)", "log = logging.getLogger('main') Char.display_mappings['\\t'] = '\\t' class UI: kb = KeyBindings() help_text =", "class UI: kb = KeyBindings() help_text = ''' p2db ---- help : Print", "= objdata self.app = Application(full_screen=True) self.current_func = '' self.dirty = True self.render_lock =", "== 'pc': stat_lines.append(\"{: >30} : {: <#8x}\".format(k, stat_dict[k])) else: stat_lines.append(\"{: >30} : {!s:", "@ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_getlong(self, args): ui_instance =", "'calla' in sec[i][1]: # pat = r'^(.*?) 
#\\\\([0-9]+)(.*?)' # pattern to get the", "'*' conn_str += fmt + '{: >10}'.format('{} Cog {}\\n'.format(marker, i)) + Style.RESET_ALL +", "Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0) ]), Frame(Box(Window(self.pins, width=95, height=5), padding=3, padding_bottom=0, padding_top=1), \"Pins\")", "# call_addr = int(r.group(2)) if r else 0 # call_dest = p2tools.get_section(self.obj_data, call_addr)", "<n> : Set the active cog to n cogaddr <addr> : Set the", "# draw the pin states porta_str = '' portb_str = '' for i", "self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release() self.app.invalidate() do_redraw = False time.sleep(0.02) def run(self): t", "Fore.RED) return r = ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"byte", "cog\" self.function_header.text = ANSI(\"*** No connection to cog\") # get the log data", "hub address 'addr'. Address should be in hex pins : Update pin status", "inst + Style.RESET_ALL self.pc_line = int((i - section_addr)/4) else: data_str += ' '*self.pc_cursor_size", ": Quit ''' pc_cursor_string = Fore.CYAN + \" ---> \" + Fore.RESET pc_cursor_size", "# pat = r'^(.*?) 
#\\\\([0-9]+)(.*?)' # pattern to get the address of a", "Fore.RED) @kb.add('c-t') def on_stepin(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepin() if r:", "stat_text = '\\n'.join(stat_lines) self.status.text = stat_text # draw cog connections status's conn_str =", "ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1]) + \"\\n\") def on_getbyte(self, args): ui_instance = UI.instance if", "+ \"\\n\", Fore.RED) def on_getreg(self, args): ui_instance = UI.instance if len(args) != 1:", "+ \"\\n\", Fore.RED) @kb.add('c-t') def on_stepin(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepin()", "on_getreg(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\",", "it to where the LUT function is stored in HUB ram # if", "shift_focus(e): e.app.layout.focus_next() def update_log(self, new_text, color=\"\"): self.log.text = ANSI(self.log.text.value + color + new_text", "sec[i][1]) if 'call' in sec[i][1]: # if 'calla' in sec[i][1]: # pat =", "'' for sec in self.obj_data: if pc in self.obj_data[sec]: section = self.obj_data[sec] func_name", "from prompt_toolkit.layout.containers import VSplit, HSplit, Window from prompt_toolkit.layout.layout import Layout from prompt_toolkit.layout import", "should be in hex pins : Update pin status data cog <n> :", "= ui_instance.server.stepin() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-o') def on_stepout(self, args=[]): ui_instance", "__init__(self, server: p2db_server.P2DBServer, objdata): assert(not UI.instance) UI.instance = self self.server = server self.obj_data", "else: self.status.text = \"*** No connection to cog\" self.function_header.text = ANSI(\"*** No connection", "handler function for each self.commands = { \"step\": self.on_step, \"stepin\": self.on_stepin, \"stepout\": self.on_stepout,", "self.on_break, \"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong, 
\"continue\": self.on_continue, \"pins\": self.on_pins, \"cog\": self.on_cog,", "call_addr) # if call_addr != 0: # # if call address is 0x200-0x400,", "if not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX portb_str += color +", "ui_instance = UI.instance ui_instance.app.exit() def accept(self, buff): cmd = self.prompt.text args = cmd.split('", "else: data_str += ' '*self.pc_cursor_size + inst + Style.RESET_ALL return data_str def prerender(self,", "+= color + \"{0: <3}\".format(bit) if (self.server.dirb >> i) & 1: color =", "sec in self.obj_data: if pc in self.obj_data[sec]: section = self.obj_data[sec] func_name = sec", "= 'L' if not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX portb_str +=", "in sec[i][1]: inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET) else: inst", "draw cog connections status's conn_str = '' for i in range(8): fmt =", "(r[0]): ui_instance.update_log(r[0], Fore.RED) else: try: addr = int(args[0], 16) ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr,", "complete_while_typing=True, completer=cmd_completer, accept_handler = self.accept, focus_on_click=True, ) # status window stuff self.status =", "3, padding_top=0) ]), Frame(Box(Window(self.pins, width=95, height=5), padding=3, padding_bottom=0, padding_top=1), \"Pins\") ]) # instruction", "min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0, y) self.pc_line = 0 self.instructions = FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos)", "+= fmt + '{: >10}'.format('{} Cog {}\\n'.format(marker, i)) + Style.RESET_ALL + Fore.RESET self.connection.text", "c = self.server.log_queue.get() if c != '\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release() self.app.invalidate()", "if call_addr >= 0x200 and call_addr < 0x400: # call_addr = 4*(call_addr -", "len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) 
return r = ui_instance.server.get_reg(args[0]) if", "Fore.RED) return try: addr = int(args[0], 16) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\",", "prompt_toolkit.completion import WordCompleter from prompt_toolkit.data_structures import Point from prompt_toolkit.formatted_text import ANSI from prompt_toolkit.layout.screen", "FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI('')) instruction_split = HSplit([ Box(Window(self.function_header, height=1), 1, padding_top=0),", "1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: addr = int(args[0], 16) except", "+ 0x200 # call_dest = p2tools.get_section(self.obj_data, call_addr) # else: # call_dest = ''", "y = self.log.text.value.count('\\n') return Point(0, y) self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area = Window(self.log)", "VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0) ]), Frame(Box(Window(self.pins, width=95, height=5), padding=3, padding_bottom=0, padding_top=1),", "function for each self.commands = { \"step\": self.on_step, \"stepin\": self.on_stepin, \"stepout\": self.on_stepout, \"break\":", "in self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command: \" + args[0] + \"\\n\", Fore.RED) self.dirty", "bit = 'X' color = Fore.LIGHTBLACK_EX portb_str += color + \"{0: <3}\".format(bit) pin_str", "address 'addr'. 
Address should be in hex pins : Update pin status data", "sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET) else: inst = \" {:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1]) if", "16), r[1]) + \"\\n\") def on_cog(self, args): ui_instance = UI.instance if len(args) !=", "Frame(Box(Window(self.pins, width=95, height=5), padding=3, padding_bottom=0, padding_top=1), \"Pins\") ]) # instruction window stuff def", "get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI('')) instruction_split = HSplit([ Box(Window(self.function_header, height=1), 1, padding_top=0), Box(Window(self.instructions, height=40),", "def get_section_str(self, sec, ptr): ''' return a atring for all instructions in a", "def shift_focus(e): e.app.layout.focus_next() def update_log(self, new_text, color=\"\"): self.log.text = ANSI(self.log.text.value + color +", "= UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r", "Address should be in hex pins : Update pin status data cog <n>", "sec[i][1]) if i == ptr: data_str += Style.BRIGHT + self.pc_cursor_string + inst +", "+ \"\\n\") def on_cog(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error:", "else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.ina >> i) & 1: bit", "Style.BRIGHT if (self.server.ina >> i) & 1: bit = 'H' else: bit =", "def on_help(self, args): ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s') def on_step(self, args=[]):", "FormattedTextControl(ANSI('')) status_split = HSplit([ VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0) ]), Frame(Box(Window(self.pins, width=95,", "self.prompt = TextArea( height=1, prompt=\"p2db > \", multiline=False, wrap_lines=False, complete_while_typing=True, completer=cmd_completer, accept_handler =", "= self.accept, focus_on_click=True, ) # status window stuff self.status = 
FormattedTextControl(ANSI('')) self.connection =", "HSplit([ VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0) ]), Frame(Box(Window(self.pins, width=95, height=5), padding=3, padding_bottom=0,", "# log stuff def log_cursor_pos(): y = self.log.text.value.count('\\n') return Point(0, y) self.log =", "'H' else: bit = 'L' if not self.server.have_pin_data: bit = 'X' color =", "Fore.CYAN, sec[i][1], Fore.RESET) else: inst = \" {:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1]) if i", "r = ui_instance.server.stepout() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_break(self, args): ui_instance", "current function call break <addr> : Set breakpoint at 'addr' and continue. 'addr'", "@kb.add('c-s') def on_step(self, args=[]): ui_instance = UI.instance r = ui_instance.server.step() if r: ui_instance.update_log(r", "in 'reg'. 'reg' can be an address or register name. Address should be", "sec['section_addr'] for i in range(section_addr, section_addr + 4*(len(sec) - 1), 4): inst =", "1), \"Source\") self.log_window = Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window = Frame(self.prompt) body = VSplit([", "not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX portb_str += color + \"{0:", "\"break\": self.on_break, \"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong, \"continue\": self.on_continue, \"pins\": self.on_pins, \"cog\":", "self.app = Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def exit_(event): event.app.exit()", "# dict of commands and handler function for each self.commands = { \"step\":", "ptr): ''' return a atring for all instructions in a given section. 
place", "UI.instance r = ui_instance.server.stepin() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-o') def on_stepout(self,", "+ '\\n\\n\\n' + portb_str + Fore.RESET + Style.RESET_ALL self.pins.text = ANSI(pin_str) # update", "+ 4*(len(sec) - 1), 4): inst = \" {:x}: {} {}\\n\".format(i, sec[i][0], sec[i][1])", "for sec in self.obj_data: if pc in self.obj_data[sec]: section = self.obj_data[sec] func_name =", "instruction # r = re.search(pat, sec[i][1]) # call_addr = int(r.group(2)) if r else", "color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.inb >> i) & 1: bit =", "argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self, args=[]): ui_instance = UI.instance ui_instance.server.update_pins() def", "self.instructions.text.value.count('\\n'))) return Point(0, y) self.pc_line = 0 self.instructions = FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header", "a atring for all instructions in a given section. place the cursor string", "while not self.server.log_queue.empty(): c = self.server.log_queue.get() if c != '\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if", "\" + args[0] + \"\\n\", Fore.RED) self.dirty = True @kb.add('c-i') def shift_focus(e): e.app.layout.focus_next()", "ANSI(\"*** No connection to cog\") # get the log data while not self.server.log_queue.empty():", "if (self.server.dirb >> i) & 1: color = Fore.RED + Style.BRIGHT else: color", "this dialog step [Ctrl+S] : Step by one instruction. 
Call instructions are stepped", "r = ui_instance.server.stepin() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-o') def on_stepout(self, args=[]):", "== p2db_server.CogState.IDLE: fmt = Fore.GREEN elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW else:", "on_step(self, args=[]): ui_instance = UI.instance r = ui_instance.server.step() if r: ui_instance.update_log(r + \"\\n\",", "connection to cog\") # get the log data while not self.server.log_queue.empty(): c =", "stored in HUB ram # if call_addr >= 0x200 and call_addr < 0x400:", "help : Print this dialog step [Ctrl+S] : Step by one instruction. Call", "draw the status dictionary stat_dict = vars(self.server.get_status()) stat_lines = [] for k in", "= stat_text # draw cog connections status's conn_str = '' for i in", "on_quit(self, args): ui_instance = UI.instance ui_instance.app.exit() def accept(self, buff): cmd = self.prompt.text args", "r = ui_instance.server.step() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-t') def on_stepin(self, args=[]):", "not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX porta_str += color + \"{0:", "accept(self, buff): cmd = self.prompt.text args = cmd.split(' ') if args[0] in self.commands:", "+= ' '*self.pc_cursor_size + inst + Style.RESET_ALL return data_str def prerender(self, app): self.render_lock.acquire()", "color = Fore.LIGHTBLACK_EX portb_str += color + \"{0: <3}\".format(bit) pin_str = porta_str +", "in HUB ram # if call_addr >= 0x200 and call_addr < 0x400: #", "r[1]) + \"\\n\") def on_getlong(self, args): ui_instance = UI.instance if len(args) != 1:", "FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI('')) status_split = HSplit([ VSplit([ Window(self.status), Box(Window(self.connection,", "r = ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: 
ui_instance.update_log(\"byte @ {:#02x}", "def on_pins(self, args=[]): ui_instance = UI.instance ui_instance.server.update_pins() def on_continue(self, args=[]): ui_instance = UI.instance", "return try: cog_num = int(args[0]) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return", "address 'addr'. Address should be in hex getlong <addr> : Get the long", "#\\\\([0-9]+)(.*?)' # pattern to get the address of a call instruction # r", "import Layout from prompt_toolkit.layout import FormattedTextControl, WindowAlign from prompt_toolkit.key_binding import KeyBindings from prompt_toolkit.widgets", "1 argument\\n\", Fore.RED) return r = ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED)", "ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: addr = int(args[0], 16) except ValueError:", "try: cog_num = int(args[0]) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num)", "address is 0x200-0x400, convert it to where the LUT function is stored in", "= UI.instance ui_instance.server.update_pins() def on_continue(self, args=[]): ui_instance = UI.instance ui_instance.server.continue_exec() def on_reset(self, args):", "} # log stuff def log_cursor_pos(): y = self.log.text.value.count('\\n') return Point(0, y) self.log", "'L' if not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX porta_str += color", "self.function_header.text = ANSI(\"*** No connection to cog\") # get the log data while", "VSplit([ self.cog_status_window, self.instruction_window, ]) root_container = HSplit([ body, self.log_window, self.prompt_window ]) layout =", "FormattedTextControl(ANSI('')) instruction_split = HSplit([ Box(Window(self.function_header, height=1), 1, padding_top=0), Box(Window(self.instructions, height=40), 1) ]) #", "to cog\" self.function_header.text = ANSI(\"*** No connection to 
cog\") # get the log", ". import p2db_server log = logging.getLogger('main') Char.display_mappings['\\t'] = '\\t' class UI: kb =", "instructions are stepped over. Modifier instructions (augd/s, setq) will be skipped. stepin [Ctrl+T]", "byte at hub address 'addr'. Address should be in hex getlong <addr> :", "fmt + '{: >10}'.format('{} Cog {}\\n'.format(marker, i)) + Style.RESET_ALL + Fore.RESET self.connection.text =", "func_name = '' for sec in self.obj_data: if pc in self.obj_data[sec]: section =", "k in stat_dict: if k.startswith('_'): pass elif k == 'pc': stat_lines.append(\"{: >30} :", "Fore.RESET) else: inst = \" {:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1]) if i == ptr:", "\"\\n\", Fore.RED) def on_getreg(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error:", "Fore.YELLOW else: fmt = Fore.RED if i == self.server.current_cog: fmt += Style.BRIGHT marker", "+ color + new_text + Fore.RESET) def get_section_str(self, sec, ptr): ''' return a", "on_stepout(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepout() if r: ui_instance.update_log(r + \"\\n\",", "a call instruction # r = re.search(pat, sec[i][1]) # call_addr = int(r.group(2)) if", "1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: cog_num = int(args[0]) except ValueError:", "call_addr = int(r.group(2)) if r else 0 # call_dest = p2tools.get_section(self.obj_data, call_addr) #", "+ portb_str + Fore.RESET + Style.RESET_ALL self.pins.text = ANSI(pin_str) # update the dissassembly", "= ANSI(pin_str) # update the dissassembly window # get the function the current", "sec[i][1] or 'tj' in sec[i][1] or 'dj' in sec[i][1]: inst = \" {:x}:", "call stepout [Ctrl+O] : Step out of the current function call break <addr>", "+ \"Cog Execution Mode. 
Set base address with 'cogaddr' to see disassembly\" +", "+ \"\\n\") @kb.add('c-s') def on_step(self, args=[]): ui_instance = UI.instance r = ui_instance.server.step() if", "ui_instance.update_log('reset unimplemented\\n') def on_quit(self, args): ui_instance = UI.instance ui_instance.app.exit() def accept(self, buff): cmd", "of a call instruction # r = re.search(pat, sec[i][1]) # call_addr = int(r.group(2))", "porta_str = '' portb_str = '' for i in range(32): bit = ''", "data_str += ' '*self.pc_cursor_size + inst + Style.RESET_ALL return data_str def prerender(self, app):", "-> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_getlong(self, args): ui_instance = UI.instance if", "p2db_server log = logging.getLogger('main') Char.display_mappings['\\t'] = '\\t' class UI: kb = KeyBindings() help_text", "= 'X' color = Fore.LIGHTBLACK_EX portb_str += color + \"{0: <3}\".format(bit) pin_str =", "Address should be in hex getlong <addr> : Get the long at hub", "stuff def inst_cursor_pos(): y = max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0, y) self.pc_line =", "cog execution address (for native cogs) continue : (unimplemented) Continue execution. 
Cog will", "for each self.commands = { \"step\": self.on_step, \"stepin\": self.on_stepin, \"stepout\": self.on_stepout, \"break\": self.on_break,", "# if call address is 0x200-0x400, convert it to where the LUT function", "self.on_quit, \"help\": self.on_help } # log stuff def log_cursor_pos(): y = self.log.text.value.count('\\n') return", ": {!s: <8}\".format(k, stat_dict[k])) stat_text = '\\n'.join(stat_lines) self.status.text = stat_text # draw cog", "if (self.server.dira >> i) & 1: color = Fore.RED + Style.BRIGHT else: color", "p2tools.get_section(self.obj_data, call_addr) # if call_addr != 0: # # if call address is", "self.dirty = True @kb.add('c-i') def shift_focus(e): e.app.layout.focus_next() def update_log(self, new_text, color=\"\"): self.log.text =", "(unimplemented) Reload the current program quit [Ctrl+Q] : Quit ''' pc_cursor_string = Fore.CYAN", "else: bit = 'L' if not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX", "self.cog_status_window = Frame(Box(status_split, 1), \"Status\") self.instruction_window = Frame(Box(instruction_split, 1), \"Source\") self.log_window = Frame(Box(self.log_area,", "'call' in sec[i][1]: # if 'calla' in sec[i][1]: # pat = r'^(.*?) #\\\\([0-9]+)(.*?)'", "by one instruction. Call instructions are stepped over. 
Modifier instructions (augd/s, setq) will", "\"getlong\": self.on_getlong, \"continue\": self.on_continue, \"pins\": self.on_pins, \"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset, \"quit\":", "def accept(self, buff): cmd = self.prompt.text args = cmd.split(' ') if args[0] in", "self.log.text = ANSI(self.log.text.value + color + new_text + Fore.RESET) def get_section_str(self, sec, ptr):", "args=[]): ui_instance = UI.instance r = ui_instance.server.stepout() if r: ui_instance.update_log(r + \"\\n\", Fore.RED)", "c != '\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release() self.app.invalidate() do_redraw = False time.sleep(0.02)", "ANSI from prompt_toolkit.layout.screen import Char from colorama import Fore, Style import threading import", "self.current_func = '' self.dirty = True self.render_lock = threading.Lock() # dict of commands", "the current PC is in pc = stat.get_mem_pc() cog_mode = stat.exec_mode == \"cogex\"", "= 4*(call_addr - 0x200) + 0x200 # call_dest = p2tools.get_section(self.obj_data, call_addr) # else:", "instructions in a given section. place the cursor string at PTR ''' data_str", "= Fore.RED if i == self.server.current_cog: fmt += Style.BRIGHT marker = '*' conn_str", ": Print this dialog step [Ctrl+S] : Step by one instruction. 
Call instructions", "on_cog(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\",", "window stuff self.status = FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI('')) status_split =", "color + \"{0: <3}\".format(bit) pin_str = porta_str + '\\n\\n\\n' + portb_str + Fore.RESET", "height=1), 1, padding_top=0), Box(Window(self.instructions, height=40), 1) ]) # Frames for each section self.cog_status_window", "args): ui_instance = UI.instance ui_instance.update_log('reset unimplemented\\n') def on_quit(self, args): ui_instance = UI.instance ui_instance.app.exit()", "on_stepin(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepin() if r: ui_instance.update_log(r + \"\\n\",", "interrupts itself reset : (unimplemented) Reload the current program quit [Ctrl+Q] : Quit", "import Frame, TextArea, Box from prompt_toolkit.completion import WordCompleter from prompt_toolkit.data_structures import Point from", "log stuff def log_cursor_pos(): y = self.log.text.value.count('\\n') return Point(0, y) self.log = FormattedTextControl(ANSI(\"\"),", "True self.render_lock = threading.Lock() # dict of commands and handler function for each", "Application(full_screen=True) self.current_func = '' self.dirty = True self.render_lock = threading.Lock() # dict of", "if args[0] in self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command: \" + args[0] + \"\\n\",", "Style.BRIGHT marker = '*' conn_str += fmt + '{: >10}'.format('{} Cog {}\\n'.format(marker, i))", "= Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window = Frame(self.prompt) body = VSplit([ self.cog_status_window, self.instruction_window, ])", "padding=1, padding_bottom=0)) self.prompt_window = Frame(self.prompt) body = VSplit([ self.cog_status_window, self.instruction_window, ]) root_container =", "= ui_instance.server.step() if r: 
ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-t') def on_stepin(self, args=[]): ui_instance", "be in hex getbyte <addr> : Get the byte at hub address 'addr'.", "0 # call_dest = p2tools.get_section(self.obj_data, call_addr) # if call_addr != 0: # #", "prompt_toolkit.layout.containers import VSplit, HSplit, Window from prompt_toolkit.layout.layout import Layout from prompt_toolkit.layout import FormattedTextControl,", "prompt_toolkit import Application from prompt_toolkit.layout.containers import VSplit, HSplit, Window from prompt_toolkit.layout.layout import Layout", "Frame(self.prompt) body = VSplit([ self.cog_status_window, self.instruction_window, ]) root_container = HSplit([ body, self.log_window, self.prompt_window", "log_cursor_pos(): y = self.log.text.value.count('\\n') return Point(0, y) self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area =", "10 instance = None def __init__(self, server: p2db_server.P2DBServer, objdata): assert(not UI.instance) UI.instance =", "# call_dest = p2tools.get_section(self.obj_data, call_addr) # else: # call_dest = '' inst =", "in sec[i][1]: # if 'calla' in sec[i][1]: # pat = r'^(.*?) #\\\\([0-9]+)(.*?)' #", "inst_cursor_pos(): y = max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0, y) self.pc_line = 0 self.instructions", "+ Style.RESET_ALL return data_str def prerender(self, app): self.render_lock.acquire() def postrender(self, app): self.render_lock.release() def", "pass elif k == 'pc': stat_lines.append(\"{: >30} : {: <#8x}\".format(k, stat_dict[k])) else: stat_lines.append(\"{:", "the long at hub address 'addr'. 
Address should be in hex pins :", "Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.inb >> i)", "UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: cog_num", "= Frame(self.prompt) body = VSplit([ self.cog_status_window, self.instruction_window, ]) root_container = HSplit([ body, self.log_window,", "self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset, \"quit\": self.on_quit, \"help\": self.on_help } # log stuff", "time import re from . import p2tools from . import p2db_server log =", "native cogs) continue : (unimplemented) Continue execution. Cog will be disconnected until it", "in hex getbyte <addr> : Get the byte at hub address 'addr'. Address", "= HSplit([ body, self.log_window, self.prompt_window ]) layout = Layout(root_container, self.prompt) self.app = Application(layout=layout,", "self.get_section_str(section, pc) self.instructions.text = ANSI(s) self.function_header.text = ANSI(func_name) else: self.status.text = \"*** No", "import FormattedTextControl, WindowAlign from prompt_toolkit.key_binding import KeyBindings from prompt_toolkit.widgets import Frame, TextArea, Box", "len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: addr = int(args[0],", "= re.search(pat, sec[i][1]) # call_addr = int(r.group(2)) if r else 0 # call_dest", "Fore.LIGHTBLACK_EX porta_str += color + \"{0: <3}\".format(bit) if (self.server.dirb >> i) & 1:", "Fore.RED) else: ui_instance.update_log(\"long @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_cog(self,", "FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI('')) status_split = HSplit([ VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0)", "self.app.invalidate() do_redraw = False time.sleep(0.02) def run(self): t = threading.Thread(target=self.data_updater, daemon=True) t.start() 
self.app.run();", "focus_on_click=True, ) # status window stuff self.status = FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI('')) self.pins", "sec[i][1]: inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET) else: inst =", "will be skipped. stepin [Ctrl+T] : Step into a function call stepout [Ctrl+O]", "if (r[0]): ui_instance.update_log(r[0], Fore.RED) else: try: addr = int(args[0], 16) ui_instance.update_log(\"reg {:#02x} ->", "ui_instance.server.step() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-t') def on_stepin(self, args=[]): ui_instance =", "prompt_toolkit.formatted_text import ANSI from prompt_toolkit.layout.screen import Char from colorama import Fore, Style import", "(stat): # draw the status dictionary stat_dict = vars(self.server.get_status()) stat_lines = [] for", "states porta_str = '' portb_str = '' for i in range(32): bit =", "func_name = sec if cog_mode and stat.exec_mode != 'lutex' and stat._cog_exec_base_addr == -1:", "ui_instance.server.stepin() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-o') def on_stepout(self, args=[]): ui_instance =", "\"\\n\", Fore.RED) def on_break(self, args): ui_instance = UI.instance r = ui_instance.server.breakpoint(args[0]) if r:", "data_str def prerender(self, app): self.render_lock.acquire() def postrender(self, app): self.render_lock.release() def data_updater(self): do_redraw =", "(r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"long @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1])", "dict of commands and handler function for each self.commands = { \"step\": self.on_step,", "self.log_area = Window(self.log) # prompt stuff cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt = TextArea( height=1,", "for i in range(section_addr, section_addr + 4*(len(sec) - 1), 4): inst = \"", "stat_lines.append(\"{: >30} : {: <#8x}\".format(k, stat_dict[k])) else: 
stat_lines.append(\"{: >30} : {!s: <8}\".format(k, stat_dict[k]))", "color = Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.inb", "server self.obj_data = objdata self.app = Application(full_screen=True) self.current_func = '' self.dirty = True", "else: stat_lines.append(\"{: >30} : {!s: <8}\".format(k, stat_dict[k])) stat_text = '\\n'.join(stat_lines) self.status.text = stat_text", "# get the function the current PC is in pc = stat.get_mem_pc() cog_mode", "root_container = HSplit([ body, self.log_window, self.prompt_window ]) layout = Layout(root_container, self.prompt) self.app =", "from prompt_toolkit.formatted_text import ANSI from prompt_toolkit.layout.screen import Char from colorama import Fore, Style", "= False do_redraw = True stat = self.server.get_status() if (stat): # draw the", "= stat.exec_mode == \"cogex\" func_name = '' for sec in self.obj_data: if pc", "cog_mode and stat.exec_mode != 'lutex' and stat._cog_exec_base_addr == -1: self.function_header.text = ANSI(Fore.YELLOW +", "to cog\") # get the log data while not self.server.log_queue.empty(): c = self.server.log_queue.get()", "0x200 and call_addr < 0x400: # call_addr = 4*(call_addr - 0x200) + 0x200", "self.function_header.text = ANSI(Fore.YELLOW + \"Cog Execution Mode. Set base address with 'cogaddr' to", "pat = r'^(.*?) 
#\\\\([0-9]+)(.*?)' # pattern to get the address of a call", "instance = None def __init__(self, server: p2db_server.P2DBServer, objdata): assert(not UI.instance) UI.instance = self", "color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.ina >> i) & 1: bit =", "padding_top=1), \"Pins\") ]) # instruction window stuff def inst_cursor_pos(): y = max(0, min(self.pc_line,", "on_getbyte(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\",", "= '' for i in range(32): bit = '' if (self.server.dira >> i)", "p2db_server.CogState.IDLE: fmt = Fore.GREEN elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW else: fmt", "{:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET) else: inst = \" {:x}: {}{}\\n\".format(i, sec[i][0],", "{:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1]) if i == ptr: data_str += Style.BRIGHT + self.pc_cursor_string", "multiline=False, wrap_lines=False, complete_while_typing=True, completer=cmd_completer, accept_handler = self.accept, focus_on_click=True, ) # status window stuff", "TextArea, Box from prompt_toolkit.completion import WordCompleter from prompt_toolkit.data_structures import Point from prompt_toolkit.formatted_text import", "inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET) else: inst = \"", "{: <#8x}\".format(k, stat_dict[k])) else: stat_lines.append(\"{: >30} : {!s: <8}\".format(k, stat_dict[k])) stat_text = '\\n'.join(stat_lines)", "\"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset, \"quit\": self.on_quit, \"help\": self.on_help } # log", "'\\t' class UI: kb = KeyBindings() help_text = ''' p2db ---- help :", "{}\\n\".format(i, sec[i][0], sec[i][1]) if 'call' in sec[i][1]: # if 'calla' in sec[i][1]: #", "log data while not self.server.log_queue.empty(): c = self.server.log_queue.get() if c != '\\r': self.update_log(c,", "int(r.group(2)) if r else 0 # 
call_dest = p2tools.get_section(self.obj_data, call_addr) # if call_addr", "the cog execution address (for native cogs) continue : (unimplemented) Continue execution. Cog", "if cog_mode and stat.exec_mode != 'lutex' and stat._cog_exec_base_addr == -1: self.function_header.text = ANSI(Fore.YELLOW", ": Set the cog execution address (for native cogs) continue : (unimplemented) Continue", "= p2tools.get_section(self.obj_data, call_addr) # else: # call_dest = '' inst = \" {:x}:", ": Step by one instruction. Call instructions are stepped over. Modifier instructions (augd/s,", "max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0, y) self.pc_line = 0 self.instructions = FormattedTextControl(ANSI(''), focusable=True,", "k == 'pc': stat_lines.append(\"{: >30} : {: <#8x}\".format(k, stat_dict[k])) else: stat_lines.append(\"{: >30} :", "!= 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: addr = int(args[0], 16)", "update the dissassembly window # get the function the current PC is in", "ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0], Fore.RED)", "sec[i][1] or 'dj' in sec[i][1]: inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1],", "= 'L' if not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX porta_str +=", "Get the value in 'reg'. 
'reg' can be an address or register name.", "= Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def exit_(event): event.app.exit() def", "prompt_toolkit.layout.layout import Layout from prompt_toolkit.layout import FormattedTextControl, WindowAlign from prompt_toolkit.key_binding import KeyBindings from", ": Step out of the current function call break <addr> : Set breakpoint", "= ANSI(\"*** No connection to cog\") # get the log data while not", "ui_instance = UI.instance r = ui_instance.server.stepout() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def", "def on_cog(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1", "Style.RESET_ALL self.pc_line = int((i - section_addr)/4) else: data_str += ' '*self.pc_cursor_size + inst", "self.server.log_queue.empty(): c = self.server.log_queue.get() if c != '\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release()", "current program quit [Ctrl+Q] : Quit ''' pc_cursor_string = Fore.CYAN + \" --->", "self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong, \"continue\": self.on_continue, \"pins\": self.on_pins, \"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr,", "args=[]): ui_instance = UI.instance ui_instance.server.continue_exec() def on_reset(self, args): ui_instance = UI.instance ui_instance.update_log('reset unimplemented\\n')", "self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def exit_(event): event.app.exit() def on_help(self, args): ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text", "@kb.add('c-q') def exit_(event): event.app.exit() def on_help(self, args): ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\")", "Get the long at hub address 'addr'. 
Address should be in hex pins", "color + \"{0: <3}\".format(bit) if (self.server.dirb >> i) & 1: color = Fore.RED", "def on_getreg(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1", "{ \"step\": self.on_step, \"stepin\": self.on_stepin, \"stepout\": self.on_stepout, \"break\": self.on_break, \"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte,", "\" {:x}: {} {}\\n\".format(i, sec[i][0], sec[i][1]) if 'call' in sec[i][1]: # if 'calla'", "KeyBindings from prompt_toolkit.widgets import Frame, TextArea, Box from prompt_toolkit.completion import WordCompleter from prompt_toolkit.data_structures", "'dj' in sec[i][1]: inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET) else:", "current PC is in pc = stat.get_mem_pc() cog_mode = stat.exec_mode == \"cogex\" func_name", "cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt = TextArea( height=1, prompt=\"p2db > \", multiline=False, wrap_lines=False, complete_while_typing=True,", "hex getreg <reg> : Get the value in 'reg'. 'reg' can be an", "Fore.RED) self.dirty = True @kb.add('c-i') def shift_focus(e): e.app.layout.focus_next() def update_log(self, new_text, color=\"\"): self.log.text", "self.server.get_status() if (stat): # draw the status dictionary stat_dict = vars(self.server.get_status()) stat_lines =", ". import p2tools from . import p2db_server log = logging.getLogger('main') Char.display_mappings['\\t'] = '\\t'", "call break <addr> : Set breakpoint at 'addr' and continue. 'addr' should be", "if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-t') def on_stepin(self, args=[]): ui_instance = UI.instance", "hex getbyte <addr> : Get the byte at hub address 'addr'. 
Address should", "addr = int(args[0], 16) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr)", "the dissassembly window # get the function the current PC is in pc", "KeyBindings() help_text = ''' p2db ---- help : Print this dialog step [Ctrl+S]", "# # if call address is 0x200-0x400, convert it to where the LUT", "Fore.RED) @kb.add('c-o') def on_stepout(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepout() if r:", "height=1, prompt=\"p2db > \", multiline=False, wrap_lines=False, complete_while_typing=True, completer=cmd_completer, accept_handler = self.accept, focus_on_click=True, )", "''' p2db ---- help : Print this dialog step [Ctrl+S] : Step by", "Fore.RED) def on_getreg(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected", "ANSI(func_name) else: self.status.text = \"*** No connection to cog\" self.function_header.text = ANSI(\"*** No", "= max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0, y) self.pc_line = 0 self.instructions = FormattedTextControl(ANSI(''),", "+ Fore.RESET) self.instructions.text = ANSI(\"\") else: s = self.get_section_str(section, pc) self.instructions.text = ANSI(s)", "align=WindowAlign.RIGHT), 3, padding_top=0) ]), Frame(Box(Window(self.pins, width=95, height=5), padding=3, padding_bottom=0, padding_top=1), \"Pins\") ]) #", "else: inst = \" {:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1]) if i == ptr: data_str", "'' for i in range(32): bit = '' if (self.server.dira >> i) &", "args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED)", "color=\"\"): self.log.text = ANSI(self.log.text.value + color + new_text + Fore.RESET) def get_section_str(self, sec,", "an address or register name. 
Address should be in hex getbyte <addr> :", "<#8x}\".format(k, stat_dict[k])) else: stat_lines.append(\"{: >30} : {!s: <8}\".format(k, stat_dict[k])) stat_text = '\\n'.join(stat_lines) self.status.text", "pc_cursor_size = 10 instance = None def __init__(self, server: p2db_server.P2DBServer, objdata): assert(not UI.instance)", "stat_dict: if k.startswith('_'): pass elif k == 'pc': stat_lines.append(\"{: >30} : {: <#8x}\".format(k,", "= stat.get_mem_pc() cog_mode = stat.exec_mode == \"cogex\" func_name = '' for sec in", "import threading import logging import time import re from . import p2tools from", "re.search(pat, sec[i][1]) # call_addr = int(r.group(2)) if r else 0 # call_dest =", "of commands and handler function for each self.commands = { \"step\": self.on_step, \"stepin\":", "value in 'reg'. 'reg' can be an address or register name. Address should", "= self.get_section_str(section, pc) self.instructions.text = ANSI(s) self.function_header.text = ANSI(func_name) else: self.status.text = \"***", "= vars(self.server.get_status()) stat_lines = [] for k in stat_dict: if k.startswith('_'): pass elif", "+ Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.inb >> i) &", "+= Style.BRIGHT marker = '*' conn_str += fmt + '{: >10}'.format('{} Cog {}\\n'.format(marker,", "self.obj_data: if pc in self.obj_data[sec]: section = self.obj_data[sec] func_name = sec if cog_mode", "int(args[0], 16) ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr, r[1]) + \"\\n\") except ValueError: ui_instance.update_log(\"{} ->", "(self.server.stat_dirty or self.dirty): self.render_lock.acquire() self.server.stat_dirty = False do_redraw = True stat = self.server.get_status()", "Set base address with 'cogaddr' to see disassembly\" + Fore.RESET) self.instructions.text = ANSI(\"\")", "to n cogaddr <addr> : Set the cog execution address (for native cogs)", "for i in range(32): bit = '' if (self.server.dira >> i) & 1:", "while(1): if (self.server.stat_dirty or 
self.dirty): self.render_lock.acquire() self.server.stat_dirty = False do_redraw = True stat", "will be disconnected until it interrupts itself reset : (unimplemented) Reload the current", "import VSplit, HSplit, Window from prompt_toolkit.layout.layout import Layout from prompt_toolkit.layout import FormattedTextControl, WindowAlign", "self.accept, focus_on_click=True, ) # status window stuff self.status = FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI(''))", "'addr'. Address should be in hex pins : Update pin status data cog", "Point(0, y) self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area = Window(self.log) # prompt stuff cmd_completer", "1), 4): inst = \" {:x}: {} {}\\n\".format(i, sec[i][0], sec[i][1]) if 'call' in", "\" ---> \" + Fore.RESET pc_cursor_size = 10 instance = None def __init__(self,", "= None def __init__(self, server: p2db_server.P2DBServer, objdata): assert(not UI.instance) UI.instance = self self.server", "help_text = ''' p2db ---- help : Print this dialog step [Ctrl+S] :", "ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"byte @ {:#02x} -> {:#02x}\".format(int(args[0],", "= VSplit([ self.cog_status_window, self.instruction_window, ]) root_container = HSplit([ body, self.log_window, self.prompt_window ]) layout", "= \"*** No connection to cog\" self.function_header.text = ANSI(\"*** No connection to cog\")", "Set the cog execution address (for native cogs) continue : (unimplemented) Continue execution.", "cmd.split(' ') if args[0] in self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command: \" + args[0]", "Step into a function call stepout [Ctrl+O] : Step out of the current", "\"step\": self.on_step, \"stepin\": self.on_stepin, \"stepout\": self.on_stepout, \"break\": self.on_break, \"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\":", 
"data_updater(self): do_redraw = False while(1): if (self.server.stat_dirty or self.dirty): self.render_lock.acquire() self.server.stat_dirty = False", "if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_break(self, args): ui_instance = UI.instance r", "= HSplit([ VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0) ]), Frame(Box(Window(self.pins, width=95, height=5), padding=3,", "continue. 'addr' should be in hex getreg <reg> : Get the value in", "[Ctrl+S] : Step by one instruction. Call instructions are stepped over. Modifier instructions", "bit = 'X' color = Fore.LIGHTBLACK_EX porta_str += color + \"{0: <3}\".format(bit) if", "HUB ram # if call_addr >= 0x200 and call_addr < 0x400: # call_addr", "<8}\".format(k, stat_dict[k])) stat_text = '\\n'.join(stat_lines) self.status.text = stat_text # draw cog connections status's", "self.obj_data[sec] func_name = sec if cog_mode and stat.exec_mode != 'lutex' and stat._cog_exec_base_addr ==", "FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area = Window(self.log) # prompt stuff cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt =", "Window from prompt_toolkit.layout.layout import Layout from prompt_toolkit.layout import FormattedTextControl, WindowAlign from prompt_toolkit.key_binding import", "-> {:#02x}\".format(args[0], r[1]) + \"\\n\") def on_getbyte(self, args): ui_instance = UI.instance if len(args)", "self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command: \" + args[0] + \"\\n\", Fore.RED) self.dirty =", "Box(Window(self.instructions, height=40), 1) ]) # Frames for each section self.cog_status_window = Frame(Box(status_split, 1),", "stepped over. Modifier instructions (augd/s, setq) will be skipped. stepin [Ctrl+T] : Step", "function call break <addr> : Set breakpoint at 'addr' and continue. 
'addr' should", "self.dirty = True self.render_lock = threading.Lock() # dict of commands and handler function", "{!s: <8}\".format(k, stat_dict[k])) stat_text = '\\n'.join(stat_lines) self.status.text = stat_text # draw cog connections", "\"\\n\", Fore.RED) @kb.add('c-o') def on_stepout(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepout() if", "if 'call' in sec[i][1]: # if 'calla' in sec[i][1]: # pat = r'^(.*?)", "+= color + \"{0: <3}\".format(bit) pin_str = porta_str + '\\n\\n\\n' + portb_str +", "base address with 'cogaddr' to see disassembly\" + Fore.RESET) self.instructions.text = ANSI(\"\") else:", "self.render_lock.release() def data_updater(self): do_redraw = False while(1): if (self.server.stat_dirty or self.dirty): self.render_lock.acquire() self.server.stat_dirty", "program quit [Ctrl+Q] : Quit ''' pc_cursor_string = Fore.CYAN + \" ---> \"", "False while(1): if (self.server.stat_dirty or self.dirty): self.render_lock.acquire() self.server.stat_dirty = False do_redraw = True", "= sec if cog_mode and stat.exec_mode != 'lutex' and stat._cog_exec_base_addr == -1: self.function_header.text", "= Fore.GREEN elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW else: fmt = Fore.RED", "except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self, args=[]):", ": Get the value in 'reg'. 
'reg' can be an address or register", "ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"long @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) +", "be in hex getlong <addr> : Get the long at hub address 'addr'.", "accept_handler = self.accept, focus_on_click=True, ) # status window stuff self.status = FormattedTextControl(ANSI('')) self.connection", "= 0 self.instructions = FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI('')) instruction_split = HSplit([", "height=5), padding=3, padding_bottom=0, padding_top=1), \"Pins\") ]) # instruction window stuff def inst_cursor_pos(): y", "if (self.server.inb >> i) & 1: bit = 'H' else: bit = 'L'", "y) self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area = Window(self.log) # prompt stuff cmd_completer =", "if (self.server.stat_dirty or self.dirty): self.render_lock.acquire() self.server.stat_dirty = False do_redraw = True stat =", "all instructions in a given section. place the cursor string at PTR '''", "portb_str + Fore.RESET + Style.RESET_ALL self.pins.text = ANSI(pin_str) # update the dissassembly window", "import logging import time import re from . import p2tools from . 
import", "def on_break(self, args): ui_instance = UI.instance r = ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r +", "Application from prompt_toolkit.layout.containers import VSplit, HSplit, Window from prompt_toolkit.layout.layout import Layout from prompt_toolkit.layout", "from prompt_toolkit.layout.layout import Layout from prompt_toolkit.layout import FormattedTextControl, WindowAlign from prompt_toolkit.key_binding import KeyBindings", "prerender(self, app): self.render_lock.acquire() def postrender(self, app): self.render_lock.release() def data_updater(self): do_redraw = False while(1):", "self.prompt_window ]) layout = Layout(root_container, self.prompt) self.app = Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender)", "should be in hex getlong <addr> : Get the long at hub address", "ui_instance = UI.instance r = ui_instance.server.step() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-t')", "{:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_cog(self, args): ui_instance = UI.instance", "# update the dissassembly window # get the function the current PC is", "focusable=True, get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI('')) instruction_split = HSplit([ Box(Window(self.function_header, height=1), 1, padding_top=0), Box(Window(self.instructions,", "stat_dict[k])) stat_text = '\\n'.join(stat_lines) self.status.text = stat_text # draw cog connections status's conn_str", "Step out of the current function call break <addr> : Set breakpoint at", "disconnected until it interrupts itself reset : (unimplemented) Reload the current program quit", "cmd = self.prompt.text args = cmd.split(' ') if args[0] in self.commands: self.commands[args[0]](args[1:]) else:", "r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-t') def on_stepin(self, args=[]): ui_instance = UI.instance r", 
"p2tools.get_section(self.obj_data, call_addr) # else: # call_dest = '' inst = \" {:x}: {}{}{}{}\\n\".format(i,", "\"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong, \"continue\": self.on_continue, \"pins\": self.on_pins, \"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\":", "ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0] +", "= p2tools.get_section(self.obj_data, call_addr) # if call_addr != 0: # # if call address", "= True @kb.add('c-i') def shift_focus(e): e.app.layout.focus_next() def update_log(self, new_text, color=\"\"): self.log.text = ANSI(self.log.text.value", "for i in range(8): fmt = '' marker = '' if self.server.cog_states[i].get_state() ==", "call_dest = '' inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif", "= 'H' else: bit = 'L' if not self.server.have_pin_data: bit = 'X' color", "{:#02x}\".format(args[0], r[1]) + \"\\n\") def on_getbyte(self, args): ui_instance = UI.instance if len(args) !=", "fmt += Style.BRIGHT marker = '*' conn_str += fmt + '{: >10}'.format('{} Cog", "execution. Cog will be disconnected until it interrupts itself reset : (unimplemented) Reload", "ANSI(self.log.text.value + color + new_text + Fore.RESET) def get_section_str(self, sec, ptr): ''' return", "PTR ''' data_str = '' section_addr = sec['section_addr'] for i in range(section_addr, section_addr", "a given section. place the cursor string at PTR ''' data_str = ''", "at hub address 'addr'. Address should be in hex pins : Update pin", "import Point from prompt_toolkit.formatted_text import ANSI from prompt_toolkit.layout.screen import Char from colorama import", "+ \" ---> \" + Fore.RESET pc_cursor_size = 10 instance = None def", "ANSI(Fore.YELLOW + \"Cog Execution Mode. 
Set base address with 'cogaddr' to see disassembly\"", "= \" {:x}: {} {}\\n\".format(i, sec[i][0], sec[i][1]) if 'call' in sec[i][1]: # if", "and stat._cog_exec_base_addr == -1: self.function_header.text = ANSI(Fore.YELLOW + \"Cog Execution Mode. Set base", "UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s') def on_step(self, args=[]): ui_instance = UI.instance r =", "\"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong, \"continue\": self.on_continue, \"pins\": self.on_pins, \"cog\": self.on_cog, \"cogaddr\":", "''' data_str = '' section_addr = sec['section_addr'] for i in range(section_addr, section_addr +", "sec[i][1], Fore.RESET) else: inst = \" {:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1]) if i ==", "= UI.instance ui_instance.app.exit() def accept(self, buff): cmd = self.prompt.text args = cmd.split(' ')", "disassembly\" + Fore.RESET) self.instructions.text = ANSI(\"\") else: s = self.get_section_str(section, pc) self.instructions.text =", "string at PTR ''' data_str = '' section_addr = sec['section_addr'] for i in", "threading.Lock() # dict of commands and handler function for each self.commands = {", "setq) will be skipped. 
stepin [Ctrl+T] : Step into a function call stepout", "1: bit = 'H' else: bit = 'L' if not self.server.have_pin_data: bit =", "def on_step(self, args=[]): ui_instance = UI.instance r = ui_instance.server.step() if r: ui_instance.update_log(r +", "the active cog to n cogaddr <addr> : Set the cog execution address", "UI.instance ui_instance.server.continue_exec() def on_reset(self, args): ui_instance = UI.instance ui_instance.update_log('reset unimplemented\\n') def on_quit(self, args):", "self.prompt) self.app = Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def exit_(event):", "sec[i][1]) # call_addr = int(r.group(2)) if r else 0 # call_dest = p2tools.get_section(self.obj_data,", "is stored in HUB ram # if call_addr >= 0x200 and call_addr <", "status window stuff self.status = FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI('')) status_split", "= self self.server = server self.obj_data = objdata self.app = Application(full_screen=True) self.current_func =", "stepout [Ctrl+O] : Step out of the current function call break <addr> :", "into a function call stepout [Ctrl+O] : Step out of the current function", "if r else 0 # call_dest = p2tools.get_section(self.obj_data, call_addr) # if call_addr !=", "args[0] in self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command: \" + args[0] + \"\\n\", Fore.RED)", "long at hub address 'addr'. 
Address should be in hex pins : Update", "on_continue(self, args=[]): ui_instance = UI.instance ui_instance.server.continue_exec() def on_reset(self, args): ui_instance = UI.instance ui_instance.update_log('reset", "'jmp' in sec[i][1] or 'tj' in sec[i][1] or 'dj' in sec[i][1]: inst =", "= Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.ina >>", "\"Cog Execution Mode. Set base address with 'cogaddr' to see disassembly\" + Fore.RESET)", "4*(call_addr - 0x200) + 0x200 # call_dest = p2tools.get_section(self.obj_data, call_addr) # else: #", "on_help(self, args): ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s') def on_step(self, args=[]): ui_instance", "return r = ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0], Fore.RED) else: try: addr = int(args[0],", "+ \"\\n\") def on_getlong(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error:", "= '' if (self.server.dira >> i) & 1: color = Fore.RED + Style.BRIGHT", "4*(len(sec) - 1), 4): inst = \" {:x}: {} {}\\n\".format(i, sec[i][0], sec[i][1]) if", "Point(0, y) self.pc_line = 0 self.instructions = FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI(''))", "self.on_help } # log stuff def log_cursor_pos(): y = self.log.text.value.count('\\n') return Point(0, y)", "\"continue\": self.on_continue, \"pins\": self.on_pins, \"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset, \"quit\": self.on_quit, \"help\":", "is in pc = stat.get_mem_pc() cog_mode = stat.exec_mode == \"cogex\" func_name = ''", "!= 0: # # if call address is 0x200-0x400, convert it to where", "+ \"{0: <3}\".format(bit) if (self.server.dirb >> i) & 1: color = Fore.RED +", "r = ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_getreg(self, args): ui_instance", 
"Fore.RESET) elif 'jmp' in sec[i][1] or 'tj' in sec[i][1] or 'dj' in sec[i][1]:", "UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: addr", "Fore.RED) def on_break(self, args): ui_instance = UI.instance r = ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r", "& 1: bit = 'H' else: bit = 'L' if not self.server.have_pin_data: bit", "the function the current PC is in pc = stat.get_mem_pc() cog_mode = stat.exec_mode", "= UI.instance r = ui_instance.server.stepin() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-o') def", "# prompt stuff cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt = TextArea( height=1, prompt=\"p2db > \",", ">> i) & 1: bit = 'H' else: bit = 'L' if not", "!= 'lutex' and stat._cog_exec_base_addr == -1: self.function_header.text = ANSI(Fore.YELLOW + \"Cog Execution Mode.", "'\\n'.join(stat_lines) self.status.text = stat_text # draw cog connections status's conn_str = '' for", "in self.obj_data[sec]: section = self.obj_data[sec] func_name = sec if cog_mode and stat.exec_mode !=", "= ANSI(func_name) else: self.status.text = \"*** No connection to cog\" self.function_header.text = ANSI(\"***", "the LUT function is stored in HUB ram # if call_addr >= 0x200", "Set the active cog to n cogaddr <addr> : Set the cog execution", "objdata self.app = Application(full_screen=True) self.current_func = '' self.dirty = True self.render_lock = threading.Lock()", "on_cogaddr(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\",", "call instruction # r = re.search(pat, sec[i][1]) # call_addr = int(r.group(2)) if r", "len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: cog_num = int(args[0])", "ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"long @ {:#02x} -> 
{:#02x}\".format(int(args[0],", "e.app.layout.focus_next() def update_log(self, new_text, color=\"\"): self.log.text = ANSI(self.log.text.value + color + new_text +", "'' if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt = Fore.GREEN elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt", "ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args): ui_instance = UI.instance", "stat_dict[k])) else: stat_lines.append(\"{: >30} : {!s: <8}\".format(k, stat_dict[k])) stat_text = '\\n'.join(stat_lines) self.status.text =", "y = max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0, y) self.pc_line = 0 self.instructions =", "expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\",", "sec[i][1]: # if 'calla' in sec[i][1]: # pat = r'^(.*?) #\\\\([0-9]+)(.*?)' # pattern", "1: color = Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if", "stat.exec_mode == \"cogex\" func_name = '' for sec in self.obj_data: if pc in", "to get the address of a call instruction # r = re.search(pat, sec[i][1])", "execution address (for native cogs) continue : (unimplemented) Continue execution. Cog will be", "function is stored in HUB ram # if call_addr >= 0x200 and call_addr", "from prompt_toolkit.widgets import Frame, TextArea, Box from prompt_toolkit.completion import WordCompleter from prompt_toolkit.data_structures import", ": Update pin status data cog <n> : Set the active cog to", "1 argument\\n\", Fore.RED) return r = ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0], Fore.RED) else: try:", "(augd/s, setq) will be skipped. 
stepin [Ctrl+T] : Step into a function call", "Fore.GREEN elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW else: fmt = Fore.RED if", "marker = '*' conn_str += fmt + '{: >10}'.format('{} Cog {}\\n'.format(marker, i)) +", "self.pc_line = int((i - section_addr)/4) else: data_str += ' '*self.pc_cursor_size + inst +", "ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_long(args[0]) if (r[0]): ui_instance.update_log(r[0] +", "= '' for i in range(8): fmt = '' marker = '' if", "= self.log.text.value.count('\\n') return Point(0, y) self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area = Window(self.log) #", "ram # if call_addr >= 0x200 and call_addr < 0x400: # call_addr =", "Frame, TextArea, Box from prompt_toolkit.completion import WordCompleter from prompt_toolkit.data_structures import Point from prompt_toolkit.formatted_text", "padding=3, padding_bottom=0, padding_top=1), \"Pins\") ]) # instruction window stuff def inst_cursor_pos(): y =", "self.cog_status_window, self.instruction_window, ]) root_container = HSplit([ body, self.log_window, self.prompt_window ]) layout = Layout(root_container,", "elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW else: fmt = Fore.RED if i", "if call_addr != 0: # # if call address is 0x200-0x400, convert it", "if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_byte(args[0])", "commands and handler function for each self.commands = { \"step\": self.on_step, \"stepin\": self.on_stepin,", "4): inst = \" {:x}: {} {}\\n\".format(i, sec[i][0], sec[i][1]) if 'call' in sec[i][1]:", "'' section_addr = sec['section_addr'] for i in range(section_addr, section_addr + 4*(len(sec) - 1),", "elif k == 'pc': stat_lines.append(\"{: >30} : {: <#8x}\".format(k, stat_dict[k])) else: stat_lines.append(\"{: 
>30}", "r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_getreg(self, args): ui_instance = UI.instance if len(args)", "ui_instance = UI.instance r = ui_instance.server.stepin() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-o')", "self.render_lock.release() self.app.invalidate() do_redraw = False time.sleep(0.02) def run(self): t = threading.Thread(target=self.data_updater, daemon=True) t.start()", "'*self.pc_cursor_size + inst + Style.RESET_ALL return data_str def prerender(self, app): self.render_lock.acquire() def postrender(self,", "get the log data while not self.server.log_queue.empty(): c = self.server.log_queue.get() if c !=", "if pc in self.obj_data[sec]: section = self.obj_data[sec] func_name = sec if cog_mode and", "app): self.render_lock.release() def data_updater(self): do_redraw = False while(1): if (self.server.stat_dirty or self.dirty): self.render_lock.acquire()", "return r = ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"byte @", "self.update_log(\"Unknown command: \" + args[0] + \"\\n\", Fore.RED) self.dirty = True @kb.add('c-i') def", "+ \"\\n\") def on_getbyte(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error:", "else 0 # call_dest = p2tools.get_section(self.obj_data, call_addr) # if call_addr != 0: #", "= Fore.YELLOW else: fmt = Fore.RED if i == self.server.current_cog: fmt += Style.BRIGHT", "def on_getlong(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected 1", "'pc': stat_lines.append(\"{: >30} : {: <#8x}\".format(k, stat_dict[k])) else: stat_lines.append(\"{: >30} : {!s: <8}\".format(k,", "import time import re from . import p2tools from . import p2db_server log", "Get the byte at hub address 'addr'. 
Address should be in hex getlong", "self.on_cogaddr, \"reset\": self.on_reset, \"quit\": self.on_quit, \"help\": self.on_help } # log stuff def log_cursor_pos():", "'\\n\\n\\n' + portb_str + Fore.RESET + Style.RESET_ALL self.pins.text = ANSI(pin_str) # update the", "self.pins.text = ANSI(pin_str) # update the dissassembly window # get the function the", "Fore.RESET pc_cursor_size = 10 instance = None def __init__(self, server: p2db_server.P2DBServer, objdata): assert(not", "width=95, height=5), padding=3, padding_bottom=0, padding_top=1), \"Pins\") ]) # instruction window stuff def inst_cursor_pos():", "if (self.server.ina >> i) & 1: bit = 'H' else: bit = 'L'", "\"\\n\", Fore.RED) @kb.add('c-t') def on_stepin(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepin() if", "hex pins : Update pin status data cog <n> : Set the active", "\"stepin\": self.on_stepin, \"stepout\": self.on_stepout, \"break\": self.on_break, \"getreg\": self.on_getreg, \"getbyte\": self.on_getbyte, \"getlong\": self.on_getlong, \"continue\":", "to see disassembly\" + Fore.RESET) self.instructions.text = ANSI(\"\") else: s = self.get_section_str(section, pc)", ": Set breakpoint at 'addr' and continue. 'addr' should be in hex getreg", "'lutex' and stat._cog_exec_base_addr == -1: self.function_header.text = ANSI(Fore.YELLOW + \"Cog Execution Mode. Set", "exit_(event): event.app.exit() def on_help(self, args): ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s') def", "i) & 1: color = Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX +", "ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error: expected", "from . import p2tools from . 
import p2db_server log = logging.getLogger('main') Char.display_mappings['\\t'] =", ">10}'.format('{} Cog {}\\n'.format(marker, i)) + Style.RESET_ALL + Fore.RESET self.connection.text = ANSI(conn_str) # draw", "height=40), 1) ]) # Frames for each section self.cog_status_window = Frame(Box(status_split, 1), \"Status\")", "cogs) continue : (unimplemented) Continue execution. Cog will be disconnected until it interrupts", "(self.server.dira >> i) & 1: color = Fore.RED + Style.BRIGHT else: color =", "+ new_text + Fore.RESET) def get_section_str(self, sec, ptr): ''' return a atring for", "Box(Window(self.function_header, height=1), 1, padding_top=0), Box(Window(self.instructions, height=40), 1) ]) # Frames for each section", "= int(r.group(2)) if r else 0 # call_dest = p2tools.get_section(self.obj_data, call_addr) # if", "+= Style.BRIGHT + self.pc_cursor_string + inst + Style.RESET_ALL self.pc_line = int((i - section_addr)/4)", "[Ctrl+Q] : Quit ''' pc_cursor_string = Fore.CYAN + \" ---> \" + Fore.RESET", "if i == self.server.current_cog: fmt += Style.BRIGHT marker = '*' conn_str += fmt", "= Layout(root_container, self.prompt) self.app = Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q')", "= UI.instance r = ui_instance.server.step() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-t') def", "get_cursor_position=log_cursor_pos) self.log_area = Window(self.log) # prompt stuff cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt = TextArea(", "one instruction. Call instructions are stepped over. 
Modifier instructions (augd/s, setq) will be", "\"{0: <3}\".format(bit) pin_str = porta_str + '\\n\\n\\n' + portb_str + Fore.RESET + Style.RESET_ALL", "in hex pins : Update pin status data cog <n> : Set the", "data cog <n> : Set the active cog to n cogaddr <addr> :", "function the current PC is in pc = stat.get_mem_pc() cog_mode = stat.exec_mode ==", "= Fore.CYAN + \" ---> \" + Fore.RESET pc_cursor_size = 10 instance =", "stat.exec_mode != 'lutex' and stat._cog_exec_base_addr == -1: self.function_header.text = ANSI(Fore.YELLOW + \"Cog Execution", "color = Fore.LIGHTBLACK_EX porta_str += color + \"{0: <3}\".format(bit) if (self.server.dirb >> i)", "argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args): ui_instance = UI.instance if len(args) !=", "No connection to cog\" self.function_header.text = ANSI(\"*** No connection to cog\") # get", "self.on_pins, \"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset, \"quit\": self.on_quit, \"help\": self.on_help } #", "= self.obj_data[sec] func_name = sec if cog_mode and stat.exec_mode != 'lutex' and stat._cog_exec_base_addr", "def __init__(self, server: p2db_server.P2DBServer, objdata): assert(not UI.instance) UI.instance = self self.server = server", "Fore.RED) else: try: addr = int(args[0], 16) ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr, r[1]) +", "each self.commands = { \"step\": self.on_step, \"stepin\": self.on_stepin, \"stepout\": self.on_stepout, \"break\": self.on_break, \"getreg\":", "if not self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX porta_str += color +", "in self.obj_data: if pc in self.obj_data[sec]: section = self.obj_data[sec] func_name = sec if", "section = self.obj_data[sec] func_name = sec if cog_mode and stat.exec_mode != 'lutex' and", "Cog {}\\n'.format(marker, i)) + Style.RESET_ALL + Fore.RESET self.connection.text = ANSI(conn_str) # draw the", "elif 'jmp' in sec[i][1] or 'tj' in sec[i][1] or 'dj' in 
sec[i][1]: inst", "@kb.add('c-i') def shift_focus(e): e.app.layout.focus_next() def update_log(self, new_text, color=\"\"): self.log.text = ANSI(self.log.text.value + color", "\"help\": self.on_help } # log stuff def log_cursor_pos(): y = self.log.text.value.count('\\n') return Point(0,", "{:x}: {} {}\\n\".format(i, sec[i][0], sec[i][1]) if 'call' in sec[i][1]: # if 'calla' in", "hex getlong <addr> : Get the long at hub address 'addr'. Address should", "True stat = self.server.get_status() if (stat): # draw the status dictionary stat_dict =", "(self.server.inb >> i) & 1: bit = 'H' else: bit = 'L' if", "---> \" + Fore.RESET pc_cursor_size = 10 instance = None def __init__(self, server:", "r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-o') def on_stepout(self, args=[]): ui_instance = UI.instance r", "active cog to n cogaddr <addr> : Set the cog execution address (for", "Update pin status data cog <n> : Set the active cog to n", "-> {:#02x}\".format(addr, r[1]) + \"\\n\") except ValueError: ui_instance.update_log(\"{} -> {:#02x}\".format(args[0], r[1]) + \"\\n\")", "= '' section_addr = sec['section_addr'] for i in range(section_addr, section_addr + 4*(len(sec) -", "atring for all instructions in a given section. 
place the cursor string at", "= FormattedTextControl(ANSI('')) self.pins = FormattedTextControl(ANSI('')) status_split = HSplit([ VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3,", "Frame(Box(status_split, 1), \"Status\") self.instruction_window = Frame(Box(instruction_split, 1), \"Source\") self.log_window = Frame(Box(self.log_area, padding=1, padding_bottom=0))", "= ui_instance.server.stepout() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_break(self, args): ui_instance =", "i)) + Style.RESET_ALL + Fore.RESET self.connection.text = ANSI(conn_str) # draw the pin states", "# pattern to get the address of a call instruction # r =", "def on_stepout(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepout() if r: ui_instance.update_log(r +", "ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self, args=[]): ui_instance =", "ui_instance.update_log(r[0], Fore.RED) else: try: addr = int(args[0], 16) ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr, r[1])", "self.log_window = Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window = Frame(self.prompt) body = VSplit([ self.cog_status_window, self.instruction_window,", "self.on_continue, \"pins\": self.on_pins, \"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset, \"quit\": self.on_quit, \"help\": self.on_help", "status data cog <n> : Set the active cog to n cogaddr <addr>", "+ Fore.RESET) def get_section_str(self, sec, ptr): ''' return a atring for all instructions", "+ \"\\n\", Fore.RED) else: ui_instance.update_log(\"byte @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\")", "section_addr)/4) else: data_str += ' '*self.pc_cursor_size + inst + Style.RESET_ALL return data_str def", "def data_updater(self): do_redraw = False while(1): if 
(self.server.stat_dirty or self.dirty): self.render_lock.acquire() self.server.stat_dirty =", "bit = '' if (self.server.dira >> i) & 1: color = Fore.RED +", "in sec[i][1] or 'tj' in sec[i][1] or 'dj' in sec[i][1]: inst = \"", "= Fore.LIGHTBLACK_EX portb_str += color + \"{0: <3}\".format(bit) pin_str = porta_str + '\\n\\n\\n'", "dialog step [Ctrl+S] : Step by one instruction. Call instructions are stepped over.", "+ \"\\n\", Fore.RED) @kb.add('c-o') def on_stepout(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepout()", "16) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self,", "fmt = Fore.RED if i == self.server.current_cog: fmt += Style.BRIGHT marker = '*'", "= 10 instance = None def __init__(self, server: p2db_server.P2DBServer, objdata): assert(not UI.instance) UI.instance", "= TextArea( height=1, prompt=\"p2db > \", multiline=False, wrap_lines=False, complete_while_typing=True, completer=cmd_completer, accept_handler = self.accept,", "be in hex pins : Update pin status data cog <n> : Set", "to where the LUT function is stored in HUB ram # if call_addr", "+ Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.ina >> i) &", "ui_instance.server.stepout() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_break(self, args): ui_instance = UI.instance", "pin status data cog <n> : Set the active cog to n cogaddr", "= cmd.split(' ') if args[0] in self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command: \" +", "= ''' p2db ---- help : Print this dialog step [Ctrl+S] : Step", "fmt = Fore.GREEN elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW else: fmt =", ") # status window stuff self.status = FormattedTextControl(ANSI('')) self.connection = FormattedTextControl(ANSI('')) 
self.pins =", "sec[i][0], sec[i][1]) if i == ptr: data_str += Style.BRIGHT + self.pc_cursor_string + inst", "= Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.inb >>", "True @kb.add('c-i') def shift_focus(e): e.app.layout.focus_next() def update_log(self, new_text, color=\"\"): self.log.text = ANSI(self.log.text.value +", "= \" {:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1]) if i == ptr: data_str += Style.BRIGHT", "i in range(section_addr, section_addr + 4*(len(sec) - 1), 4): inst = \" {:x}:", "call_addr = 4*(call_addr - 0x200) + 0x200 # call_dest = p2tools.get_section(self.obj_data, call_addr) #", "not self.server.log_queue.empty(): c = self.server.log_queue.get() if c != '\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw:", "args): ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s') def on_step(self, args=[]): ui_instance =", "Fore.RED + Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.ina >> i)", "ANSI(pin_str) # update the dissassembly window # get the function the current PC", "= '' inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif 'jmp'", "int((i - section_addr)/4) else: data_str += ' '*self.pc_cursor_size + inst + Style.RESET_ALL return", "HSplit([ Box(Window(self.function_header, height=1), 1, padding_top=0), Box(Window(self.instructions, height=40), 1) ]) # Frames for each", "stat.get_mem_pc() cog_mode = stat.exec_mode == \"cogex\" func_name = '' for sec in self.obj_data:", "on_pins(self, args=[]): ui_instance = UI.instance ui_instance.server.update_pins() def on_continue(self, args=[]): ui_instance = UI.instance ui_instance.server.continue_exec()", "PC is in pc = stat.get_mem_pc() cog_mode = stat.exec_mode == \"cogex\" func_name =", "cog_num = int(args[0]) except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num) 
def", "len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_long(args[0]) if", "in range(32): bit = '' if (self.server.dira >> i) & 1: color =", "except ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args): ui_instance", "Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self, args=[]): ui_instance = UI.instance ui_instance.server.update_pins() def on_continue(self,", "self.status.text = \"*** No connection to cog\" self.function_header.text = ANSI(\"*** No connection to", "k.startswith('_'): pass elif k == 'pc': stat_lines.append(\"{: >30} : {: <#8x}\".format(k, stat_dict[k])) else:", "1, padding_top=0), Box(Window(self.instructions, height=40), 1) ]) # Frames for each section self.cog_status_window =", "\"Status\") self.instruction_window = Frame(Box(instruction_split, 1), \"Source\") self.log_window = Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window =", "<addr> : Set the cog execution address (for native cogs) continue : (unimplemented)", "Layout from prompt_toolkit.layout import FormattedTextControl, WindowAlign from prompt_toolkit.key_binding import KeyBindings from prompt_toolkit.widgets import", "r[1]) + \"\\n\") def on_cog(self, args): ui_instance = UI.instance if len(args) != 1:", "- 0x200) + 0x200 # call_dest = p2tools.get_section(self.obj_data, call_addr) # else: # call_dest", "ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_getreg(self, args): ui_instance = UI.instance", "Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.ina >> i) & 1: bit = 'H' else:", "of the current function call break <addr> : Set breakpoint at 'addr' and", "getlong <addr> : Get the long at hub address 'addr'. 
Address should be", "= UI.instance r = ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_getreg(self,", "from prompt_toolkit.layout import FormattedTextControl, WindowAlign from prompt_toolkit.key_binding import KeyBindings from prompt_toolkit.widgets import Frame,", "see disassembly\" + Fore.RESET) self.instructions.text = ANSI(\"\") else: s = self.get_section_str(section, pc) self.instructions.text", "'' portb_str = '' for i in range(32): bit = '' if (self.server.dira", "Fore.RED if i == self.server.current_cog: fmt += Style.BRIGHT marker = '*' conn_str +=", "inst = \" {:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1]) if i == ptr: data_str +=", "len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_byte(args[0]) if", "sec if cog_mode and stat.exec_mode != 'lutex' and stat._cog_exec_base_addr == -1: self.function_header.text =", "r = ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0], Fore.RED) else: try: addr = int(args[0], 16)", "expected 1 argument\\n\", Fore.RED) return try: cog_num = int(args[0]) except ValueError: ui_instance.update_log(\"Error: expected", "prompt=\"p2db > \", multiline=False, wrap_lines=False, complete_while_typing=True, completer=cmd_completer, accept_handler = self.accept, focus_on_click=True, ) #", "args): ui_instance = UI.instance r = ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r + \"\\n\", Fore.RED)", "# if call_addr != 0: # # if call address is 0x200-0x400, convert", "# call_dest = p2tools.get_section(self.obj_data, call_addr) # if call_addr != 0: # # if", "the status dictionary stat_dict = vars(self.server.get_status()) stat_lines = [] for k in stat_dict:", "return ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args): ui_instance = UI.instance if len(args) != 1: ui_instance.update_log(\"Error:", "i) & 1: bit = 'H' else: bit = 'L' if not self.server.have_pin_data:", "== 
ptr: data_str += Style.BRIGHT + self.pc_cursor_string + inst + Style.RESET_ALL self.pc_line =", "No connection to cog\") # get the log data while not self.server.log_queue.empty(): c", "1 argument\\n\", Fore.RED) return try: cog_num = int(args[0]) except ValueError: ui_instance.update_log(\"Error: expected numeric", "self.server.have_pin_data: bit = 'X' color = Fore.LIGHTBLACK_EX porta_str += color + \"{0: <3}\".format(bit)", "TextArea( height=1, prompt=\"p2db > \", multiline=False, wrap_lines=False, complete_while_typing=True, completer=cmd_completer, accept_handler = self.accept, focus_on_click=True,", "padding_bottom=0, padding_top=1), \"Pins\") ]) # instruction window stuff def inst_cursor_pos(): y = max(0,", "can be an address or register name. Address should be in hex getbyte", "y) self.pc_line = 0 self.instructions = FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI('')) instruction_split", "numeric argument\\n\", Fore.RED) return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self, args=[]): ui_instance = UI.instance ui_instance.server.update_pins()", "\"\\n\", Fore.RED) else: ui_instance.update_log(\"long @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def", "0 self.instructions = FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI('')) instruction_split = HSplit([ Box(Window(self.function_header,", "threading import logging import time import re from . 
import p2tools from .", "UI: kb = KeyBindings() help_text = ''' p2db ---- help : Print this", "Fore.RESET self.connection.text = ANSI(conn_str) # draw the pin states porta_str = '' portb_str", "if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt = Fore.GREEN elif self.server.cog_states[i].get_state() == p2db_server.CogState.EXECUTING: fmt =", "'\\r': self.update_log(c, Fore.LIGHTGREEN_EX) if do_redraw: self.render_lock.release() self.app.invalidate() do_redraw = False time.sleep(0.02) def run(self):", "the pin states porta_str = '' portb_str = '' for i in range(32):", "ValueError: ui_instance.update_log(\"Error: expected numeric argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args): ui_instance =", "stat_lines = [] for k in stat_dict: if k.startswith('_'): pass elif k ==", "ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s') def on_step(self, args=[]): ui_instance = UI.instance r = ui_instance.server.step()", "the current function call break <addr> : Set breakpoint at 'addr' and continue.", "r = re.search(pat, sec[i][1]) # call_addr = int(r.group(2)) if r else 0 #", "(r[0]): ui_instance.update_log(r[0] + \"\\n\", Fore.RED) else: ui_instance.update_log(\"byte @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1])", "numeric argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args): ui_instance = UI.instance if len(args)", "Style.BRIGHT + self.pc_cursor_string + inst + Style.RESET_ALL self.pc_line = int((i - section_addr)/4) else:", "def inst_cursor_pos(): y = max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0, y) self.pc_line = 0", "assert(not UI.instance) UI.instance = self self.server = server self.obj_data = objdata self.app =", "Cog will be disconnected until it interrupts itself reset : (unimplemented) Reload the", "Fore.RED) return try: cog_num = int(args[0]) except ValueError: ui_instance.update_log(\"Error: 
expected numeric argument\\n\", Fore.RED)", "if (stat): # draw the status dictionary stat_dict = vars(self.server.get_status()) stat_lines = []", "= FormattedTextControl(ANSI('')) status_split = HSplit([ VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0) ]), Frame(Box(Window(self.pins,", "hub address 'addr'. Address should be in hex getlong <addr> : Get the", "+ Fore.RESET + Style.RESET_ALL self.pins.text = ANSI(pin_str) # update the dissassembly window #", "body = VSplit([ self.cog_status_window, self.instruction_window, ]) root_container = HSplit([ body, self.log_window, self.prompt_window ])", "UI.instance = self self.server = server self.obj_data = objdata self.app = Application(full_screen=True) self.current_func", "self self.server = server self.obj_data = objdata self.app = Application(full_screen=True) self.current_func = ''", "before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def exit_(event): event.app.exit() def on_help(self, args): ui_instance =", "') if args[0] in self.commands: self.commands[args[0]](args[1:]) else: self.update_log(\"Unknown command: \" + args[0] +", "self.render_lock = threading.Lock() # dict of commands and handler function for each self.commands", "stepin [Ctrl+T] : Step into a function call stepout [Ctrl+O] : Step out", "status dictionary stat_dict = vars(self.server.get_status()) stat_lines = [] for k in stat_dict: if", "= True stat = self.server.get_status() if (stat): # draw the status dictionary stat_dict", "return Point(0, y) self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area = Window(self.log) # prompt stuff", "self.pc_line = 0 self.instructions = FormattedTextControl(ANSI(''), focusable=True, get_cursor_position=inst_cursor_pos) self.function_header = FormattedTextControl(ANSI('')) instruction_split =", "else: self.update_log(\"Unknown command: \" + 
args[0] + \"\\n\", Fore.RED) self.dirty = True @kb.add('c-i')", "cogaddr <addr> : Set the cog execution address (for native cogs) continue :", "addr = int(args[0], 16) ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr, r[1]) + \"\\n\") except ValueError:", "self.on_getlong, \"continue\": self.on_continue, \"pins\": self.on_pins, \"cog\": self.on_cog, \"cogaddr\": self.on_cogaddr, \"reset\": self.on_reset, \"quit\": self.on_quit,", "= '' portb_str = '' for i in range(32): bit = '' if", "= Application(full_screen=True) self.current_func = '' self.dirty = True self.render_lock = threading.Lock() # dict", "key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def exit_(event): event.app.exit() def on_help(self, args):", "{:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_getlong(self, args): ui_instance = UI.instance", "layout = Layout(root_container, self.prompt) self.app = Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c')", "return a atring for all instructions in a given section. place the cursor", "Frames for each section self.cog_status_window = Frame(Box(status_split, 1), \"Status\") self.instruction_window = Frame(Box(instruction_split, 1),", "inst = \" {:x}: {} {}\\n\".format(i, sec[i][0], sec[i][1]) if 'call' in sec[i][1]: #", "'' for i in range(8): fmt = '' marker = '' if self.server.cog_states[i].get_state()", "self.pins = FormattedTextControl(ANSI('')) status_split = HSplit([ VSplit([ Window(self.status), Box(Window(self.connection, align=WindowAlign.RIGHT), 3, padding_top=0) ]),", "bit = 'H' else: bit = 'L' if not self.server.have_pin_data: bit = 'X'", "'addr'. 
Address should be in hex getlong <addr> : Get the long at", "i in range(8): fmt = '' marker = '' if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE:", "expected 1 argument\\n\", Fore.RED) return r = ui_instance.server.get_byte(args[0]) if (r[0]): ui_instance.update_log(r[0] + \"\\n\",", "stuff def log_cursor_pos(): y = self.log.text.value.count('\\n') return Point(0, y) self.log = FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos)", "import KeyBindings from prompt_toolkit.widgets import Frame, TextArea, Box from prompt_toolkit.completion import WordCompleter from", "over. Modifier instructions (augd/s, setq) will be skipped. stepin [Ctrl+T] : Step into", "if len(args) != 1: ui_instance.update_log(\"Error: expected 1 argument\\n\", Fore.RED) return try: cog_num =", "UI.instance ui_instance.server.update_pins() def on_continue(self, args=[]): ui_instance = UI.instance ui_instance.server.continue_exec() def on_reset(self, args): ui_instance", "the cursor string at PTR ''' data_str = '' section_addr = sec['section_addr'] for", "Box from prompt_toolkit.completion import WordCompleter from prompt_toolkit.data_structures import Point from prompt_toolkit.formatted_text import ANSI", "== p2db_server.CogState.EXECUTING: fmt = Fore.YELLOW else: fmt = Fore.RED if i == self.server.current_cog:", "s = self.get_section_str(section, pc) self.instructions.text = ANSI(s) self.function_header.text = ANSI(func_name) else: self.status.text =", "fmt = '' marker = '' if self.server.cog_states[i].get_state() == p2db_server.CogState.IDLE: fmt = Fore.GREEN", "stat._cog_exec_base_addr == -1: self.function_header.text = ANSI(Fore.YELLOW + \"Cog Execution Mode. 
Set base address", "cog\") # get the log data while not self.server.log_queue.empty(): c = self.server.log_queue.get() if", "= Frame(Box(status_split, 1), \"Status\") self.instruction_window = Frame(Box(instruction_split, 1), \"Source\") self.log_window = Frame(Box(self.log_area, padding=1,", "padding_bottom=0)) self.prompt_window = Frame(self.prompt) body = VSplit([ self.cog_status_window, self.instruction_window, ]) root_container = HSplit([", "be an address or register name. Address should be in hex getbyte <addr>", "ui_instance.update_log(\"long @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_cog(self, args): ui_instance", "+ \"\\n\", Fore.RED) else: ui_instance.update_log(\"long @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\")", "else: # call_dest = '' inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1],", "expected numeric argument\\n\", Fore.RED) return ui_instance.server.set_cog(cog_num) def on_cogaddr(self, args): ui_instance = UI.instance if", "self.log_window, self.prompt_window ]) layout = Layout(root_container, self.prompt) self.app = Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender,", "p2db_server.P2DBServer, objdata): assert(not UI.instance) UI.instance = self self.server = server self.obj_data = objdata", "= FormattedTextControl(ANSI(\"\"), get_cursor_position=log_cursor_pos) self.log_area = Window(self.log) # prompt stuff cmd_completer = WordCompleter(list(self.commands.keys())) self.prompt", "the value in 'reg'. 'reg' can be an address or register name. Address", "should be in hex getreg <reg> : Get the value in 'reg'. 
'reg'", "stat = self.server.get_status() if (stat): # draw the status dictionary stat_dict = vars(self.server.get_status())", "= UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s') def on_step(self, args=[]): ui_instance = UI.instance r", "= 'X' color = Fore.LIGHTBLACK_EX porta_str += color + \"{0: <3}\".format(bit) if (self.server.dirb", "Point from prompt_toolkit.formatted_text import ANSI from prompt_toolkit.layout.screen import Char from colorama import Fore,", "Frame(Box(instruction_split, 1), \"Source\") self.log_window = Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window = Frame(self.prompt) body =", "self.status.text = stat_text # draw cog connections status's conn_str = '' for i", "self.dirty): self.render_lock.acquire() self.server.stat_dirty = False do_redraw = True stat = self.server.get_status() if (stat):", "buff): cmd = self.prompt.text args = cmd.split(' ') if args[0] in self.commands: self.commands[args[0]](args[1:])", "\"Pins\") ]) # instruction window stuff def inst_cursor_pos(): y = max(0, min(self.pc_line, self.instructions.text.value.count('\\n')))", "inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET) elif 'jmp' in sec[i][1]", "self.instruction_window = Frame(Box(instruction_split, 1), \"Source\") self.log_window = Frame(Box(self.log_area, padding=1, padding_bottom=0)) self.prompt_window = Frame(self.prompt)", "range(section_addr, section_addr + 4*(len(sec) - 1), 4): inst = \" {:x}: {} {}\\n\".format(i,", "portb_str += color + \"{0: <3}\".format(bit) pin_str = porta_str + '\\n\\n\\n' + portb_str", "at hub address 'addr'. 
Address should be in hex getlong <addr> : Get", "it interrupts itself reset : (unimplemented) Reload the current program quit [Ctrl+Q] :", "]) # Frames for each section self.cog_status_window = Frame(Box(status_split, 1), \"Status\") self.instruction_window =", "in pc = stat.get_mem_pc() cog_mode = stat.exec_mode == \"cogex\" func_name = '' for", "'cogaddr' to see disassembly\" + Fore.RESET) self.instructions.text = ANSI(\"\") else: s = self.get_section_str(section,", "if i == ptr: data_str += Style.BRIGHT + self.pc_cursor_string + inst + Style.RESET_ALL", "if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-o') def on_stepout(self, args=[]): ui_instance = UI.instance", "reset : (unimplemented) Reload the current program quit [Ctrl+Q] : Quit ''' pc_cursor_string", "]), Frame(Box(Window(self.pins, width=95, height=5), padding=3, padding_bottom=0, padding_top=1), \"Pins\") ]) # instruction window stuff", "for all instructions in a given section. place the cursor string at PTR", "{}{}{}{}\\n\".format(i, sec[i][0], Fore.CYAN, sec[i][1], Fore.RESET) else: inst = \" {:x}: {}{}\\n\".format(i, sec[i][0], sec[i][1])", "for k in stat_dict: if k.startswith('_'): pass elif k == 'pc': stat_lines.append(\"{: >30}", "instruction window stuff def inst_cursor_pos(): y = max(0, min(self.pc_line, self.instructions.text.value.count('\\n'))) return Point(0, y)", "Fore.LIGHTBLACK_EX portb_str += color + \"{0: <3}\".format(bit) pin_str = porta_str + '\\n\\n\\n' +", "else: ui_instance.update_log(\"long @ {:#02x} -> {:#02x}\".format(int(args[0], 16), r[1]) + \"\\n\") def on_cog(self, args):", "import Application from prompt_toolkit.layout.containers import VSplit, HSplit, Window from prompt_toolkit.layout.layout import Layout from", "def exit_(event): event.app.exit() def on_help(self, args): ui_instance = UI.instance ui_instance.update_log(ui_instance.help_text + \"\\n\") @kb.add('c-s')", "\", multiline=False, wrap_lines=False, complete_while_typing=True, 
completer=cmd_completer, accept_handler = self.accept, focus_on_click=True, ) # status window", "call_dest = p2tools.get_section(self.obj_data, call_addr) # else: # call_dest = '' inst = \"", "== -1: self.function_header.text = ANSI(Fore.YELLOW + \"Cog Execution Mode. Set base address with", "UI.instance r = ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_getreg(self, args):", "Style.RESET_ALL self.pins.text = ANSI(pin_str) # update the dissassembly window # get the function", "range(32): bit = '' if (self.server.dira >> i) & 1: color = Fore.RED", "self.on_reset, \"quit\": self.on_quit, \"help\": self.on_help } # log stuff def log_cursor_pos(): y =", "address (for native cogs) continue : (unimplemented) Continue execution. Cog will be disconnected", "ui_instance.server.get_reg(args[0]) if (r[0]): ui_instance.update_log(r[0], Fore.RED) else: try: addr = int(args[0], 16) ui_instance.update_log(\"reg {:#02x}", "- section_addr)/4) else: data_str += ' '*self.pc_cursor_size + inst + Style.RESET_ALL return data_str", "ui_instance = UI.instance r = ui_instance.server.breakpoint(args[0]) if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def", "Style import threading import logging import time import re from . import p2tools", "<addr> : Get the long at hub address 'addr'. Address should be in", "return ui_instance.server.cog_states[ui_instance.server.current_cog].status.set_cog_addr(addr) @kb.add('c-p') def on_pins(self, args=[]): ui_instance = UI.instance ui_instance.server.update_pins() def on_continue(self, args=[]):", "= Fore.LIGHTBLACK_EX porta_str += color + \"{0: <3}\".format(bit) if (self.server.dirb >> i) &", "# call_dest = '' inst = \" {:x}: {}{}{}{}\\n\".format(i, sec[i][0], Fore.LIGHTGREEN_EX, sec[i][1], Fore.RESET)", "are stepped over. Modifier instructions (augd/s, setq) will be skipped. 
stepin [Ctrl+T] :", "else: try: addr = int(args[0], 16) ui_instance.update_log(\"reg {:#02x} -> {:#02x}\".format(addr, r[1]) + \"\\n\")", "\"cogex\" func_name = '' for sec in self.obj_data: if pc in self.obj_data[sec]: section", "instruction_split = HSplit([ Box(Window(self.function_header, height=1), 1, padding_top=0), Box(Window(self.instructions, height=40), 1) ]) # Frames", "<3}\".format(bit) if (self.server.dirb >> i) & 1: color = Fore.RED + Style.BRIGHT else:", "= KeyBindings() help_text = ''' p2db ---- help : Print this dialog step", "Style.BRIGHT if (self.server.inb >> i) & 1: bit = 'H' else: bit =", "Layout(root_container, self.prompt) self.app = Application(layout=layout, key_bindings=self.kb, full_screen=True, before_render=self.prerender, after_render=self.postrender) self.app.layout.focus(self.prompt_window) @kb.add('c-c') @kb.add('c-q') def", "= '' self.dirty = True self.render_lock = threading.Lock() # dict of commands and", "''' pc_cursor_string = Fore.CYAN + \" ---> \" + Fore.RESET pc_cursor_size = 10", "in hex getreg <reg> : Get the value in 'reg'. 
'reg' can be", "pattern to get the address of a call instruction # r = re.search(pat,", "UI.instance r = ui_instance.server.step() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) @kb.add('c-t') def on_stepin(self,", "from prompt_toolkit.completion import WordCompleter from prompt_toolkit.data_structures import Point from prompt_toolkit.formatted_text import ANSI from", "Style.BRIGHT else: color = Fore.LIGHTBLUE_EX + Style.BRIGHT if (self.server.ina >> i) & 1:", "= UI.instance ui_instance.update_log('reset unimplemented\\n') def on_quit(self, args): ui_instance = UI.instance ui_instance.app.exit() def accept(self,", "until it interrupts itself reset : (unimplemented) Reload the current program quit [Ctrl+Q]", "sec, ptr): ''' return a atring for all instructions in a given section.", "= ANSI(self.log.text.value + color + new_text + Fore.RESET) def get_section_str(self, sec, ptr): '''", "update_log(self, new_text, color=\"\"): self.log.text = ANSI(self.log.text.value + color + new_text + Fore.RESET) def", "data while not self.server.log_queue.empty(): c = self.server.log_queue.get() if c != '\\r': self.update_log(c, Fore.LIGHTGREEN_EX)", "= UI.instance r = ui_instance.server.stepout() if r: ui_instance.update_log(r + \"\\n\", Fore.RED) def on_break(self,", "ui_instance = UI.instance ui_instance.server.update_pins() def on_continue(self, args=[]): ui_instance = UI.instance ui_instance.server.continue_exec() def on_reset(self,", "and stat.exec_mode != 'lutex' and stat._cog_exec_base_addr == -1: self.function_header.text = ANSI(Fore.YELLOW + \"Cog", "@kb.add('c-t') def on_stepin(self, args=[]): ui_instance = UI.instance r = ui_instance.server.stepin() if r: ui_instance.update_log(r", "porta_str += color + \"{0: <3}\".format(bit) if (self.server.dirb >> i) & 1: color" ]
[ "VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0:", "print(np.shape(Test_Y)) if sub==0: for i in range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif sub==subjects-1: for i", "for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/'", "dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif dB== \"CASME2_large\":", "(np.shape(Train_X)) print (np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X) ##", "from keras import backend as K from labelling import collectinglabel from reordering import", "expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50; w=50 resizedFlag=1; subjects=26 samples=246", "model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the", "if sub==subjects-1: # compute the accuracy, F1, P and R from the overall", "excluded subjects=16; n_exp=3; r= 
170;w=140; VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT in", "######## Seperating the input files into LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp)) for sub in", "for each input video collectinglabel(table, sub[3:], vid, workplace+'Classification/', dB) for var in range(numFrame):", "else: for i in range(subjects): if sub == i: continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i])", "delimiter=' ') thewriter.writerow('Sub ' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in ct: thewriter.writerow(row) thewriter.writerow(order)", "VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0:", "as csvfile: thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub ' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in", "workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif dB==", "range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif sub==subjects-1: for i in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for", "glob,os import xlrd import cv2 import pandas as pd from sklearn.svm import SVC", "= np.reshape( Train_X, Train_X.shape ) # Train_X = np.reshape(2500, 16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5)", "data_dim=r*w # 2500 print(data_dim) timesteps=10 # LSTM1 = LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential()", 
"mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write each CT of each CV into .txt file if", "colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246 n_exp=5 resizedFlag=1; r=68; w=56 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01',", "0 print (predict) print (Test_Y) #compute the ConfusionMat ct=confusion_matrix(Test_Y,predict) #check the order of", "sub == i: continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) # print(Train_X) # Train_X=np.hstack(Train_X) # print(Train_X.shape)", "changed to hstack from vstack # print(Train_X.shape) # Train_X = Train_X.shape[1:] # print(Train_X.shape)", "model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the label based on subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[]", "csvfile: thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub ' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in ct:", "K from labelling import collectinglabel from reordering import readinput from evaluationmatrix import fpr", "range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for i in range(subjects): if sub == i: continue", "the input files into LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp)) for sub in range(subjects): Train_X=[]", "sub==0: for i in range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif sub==subjects-1: for i in range(subjects-1):", "predict=model.predict_classes(Test_X) ## predict[predict>= 0.5] = 1 ## predict[predict<0.5] = 0 print (predict) print", "CV ######## tot_mat=np.zeros((n_exp,n_exp)) for sub in range(subjects): Train_X=[] 
Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5)", "tot_mat=mat+tot_mat # write each CT of each CV into .txt file if not", "from keras.models import Sequential from keras.layers import LSTM, Dense, TimeDistributed from keras.utils import", "CT order=np.unique(np.concatenate((predict,Test_Y))) #create an array to hold the CT for each CV mat=np.zeros((n_exp,n_exp))", "in listOfIgnoredSamples: continue # print(dB) # print(path) imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag ==1: col=w", "input files into LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp)) for sub in range(subjects): Train_X=[] Train_Y=[]", "colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None)", "Seperating the input files into LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp)) for sub in range(subjects):", "for sub in sorted([infile for infile in os.listdir(inputDir)]): VidperSub=[] for vid in sorted([inrfile", "#in resize function, [col,row] img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) #####", "backend as K from labelling import collectinglabel from reordering import readinput from evaluationmatrix", "[6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT in the selection.\") ######### Reading in the input", "from keras.utils import np_utils from keras import metrics from keras import backend as", "if path in listOfIgnoredSamples: continue # print(dB) # print(path) imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag", "img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1: #in resize 
function, [col,row]", "colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples are excluded", "# Train_X = np.reshape( Train_X, Train_X.shape ) # Train_X = np.reshape(2500, 16077) print(Train_X.shape)", "sklearn.svm import SVC from collections import Counter from sklearn.metrics import confusion_matrix import scipy.io", "sub in sorted([infile for infile in os.listdir(inputDir)]): VidperSub=[] for vid in sorted([inrfile for", "# Train_X = Train_X.shape[1:] # print(Train_X.shape) # Train_X = np.expand_dims(Train_X, axis=2) # Train_X", "xlrd import cv2 import pandas as pd from sklearn.svm import SVC from collections", "np.reshape( Train_X, Train_X.shape ) # Train_X = np.reshape(2500, 16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print", "w=56 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if", "Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print (np.shape(Train_X)) print (np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1,", "in range(len(order)): for n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write each CT of", "SVC from collections import Counter from sklearn.metrics import confusion_matrix import scipy.io as sio", ") # Train_X = np.reshape( Train_X, Train_X.shape ) # Train_X = np.reshape(2500, 16077)", "colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) 
expression=[str(x.value) for x in colm]", "(np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X) ## predict[predict>= 0.5] = 1", "model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X) ## predict[predict>= 0.5] = 1 ##", "table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246 n_exp=5 resizedFlag=1; r=68; w=56 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01']", "order=np.unique(np.concatenate((predict,Test_Y))) #create an array to hold the CT for each CV mat=np.zeros((n_exp,n_exp)) #put", "s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with croppoed for testing", "i in range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif sub==subjects-1: for i in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i])", "to hold the CT for each CV mat=np.zeros((n_exp,n_exp)) #put the order accordingly, in", "CT for each CV mat=np.zeros((n_exp,n_exp)) #put the order accordingly, in order to form", "[row,col,_l]=img.shape ## ##read the label for each input video collectinglabel(table, sub[3:], vid, workplace+'Classification/',", "(Test_Y) #compute the ConfusionMat ct=confusion_matrix(Test_Y,predict) #check the order of the CT order=np.unique(np.concatenate((predict,Test_Y))) #create", "listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB == \"SMIC\": 
inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for", "predict[predict>= 0.5] = 1 ## predict[predict<0.5] = 0 print (predict) print (Test_Y) #compute", "x in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples are excluded subjects=16; n_exp=3; r= 170;w=140;", "keras.layers import LSTM, Dense, TimeDistributed from keras.utils import np_utils from keras import metrics", "else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB == \"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x", "file with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile,", "from keras import metrics from keras import backend as K from labelling import", "keras.utils import np_utils from keras import metrics from keras import backend as K", "sub + '/'+ vid + '/' if path in listOfIgnoredSamples: continue # print(dB)", "overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save into a .txt file with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w')", "for n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write each CT of each CV", "numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter = counter + numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ######## Seperating the input", "Setting up the LSTM model ######## data_dim=r*w # 2500 print(data_dim) timesteps=10 # LSTM1", "#check the order of the CT 
order=np.unique(np.concatenate((predict,Test_Y))) #create an array to hold the", "model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the label based on subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB", "Train_Y.append(labelperSub[i]) # print(Train_X) # Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X) # changed to hstack from", "'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif", "(np.shape(Train_Y)) print (np.shape(Train_X)) print (np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1, batch_size=20) model.summary()", "m in range(len(order)): for n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write each CT", "w=50 resizedFlag=1; subjects=26 samples=246 n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for", "import csv import glob,os import xlrd import cv2 import pandas as pd from", "print (np.shape(Train_Y)) print (np.shape(Train_X)) print (np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1, batch_size=20)", "## predict[predict>= 0.5] = 1 ## predict[predict<0.5] = 0 print (predict) print (Test_Y)", "microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save into a .txt file with 
open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile:", "for vid in sorted([inrfile for inrfile in os.listdir(inputDir+sub)]): path=inputDir + sub + '/'+", "in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50; w=50", "if sub == i: continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) # print(Train_X) # Train_X=np.hstack(Train_X) #", "print (np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X) ## predict[predict>= 0.5] =", "in sorted([infile for infile in os.listdir(inputDir)]): VidperSub=[] for vid in sorted([inrfile for inrfile", "x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x", "accuracy, F1, P and R from the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save", "a .txt file with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in tot_mat:", "model=Sequential() # model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid'))", "listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB == \"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in", 
"input video collectinglabel(table, sub[3:], vid, workplace+'Classification/', dB) for var in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape", "data_dim)) model=Sequential() # model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False))", "in range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter = counter + numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ######## Seperating", "as sio from keras.models import Sequential from keras.layers import LSTM, Dense, TimeDistributed from", "var in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1: #in", "import sys import math import operator import csv import glob,os import xlrd import", "samples=246 n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)):", "compute the accuracy, F1, P and R from the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp)", "import SVC from collections import Counter from sklearn.metrics import confusion_matrix import scipy.io as", "'Result/'+dB+'/') with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub ' + str(sub+1))", "print(Train_X.shape) # Train_X = Train_X.shape[1:] # print(Train_X.shape) # Train_X = np.expand_dims(Train_X, axis=2) #", "not 
os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub", "fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif", "= [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]]", "#put the order accordingly, in order to form the overall ConfusionMat for m", "listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB ==", "+ sub + '/'+ vid + '/' if path in listOfIgnoredSamples: continue #", "in os.listdir(inputDir+sub)]): path=inputDir + sub + '/'+ vid + '/' if path in", "'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif", "######### Reading in the input images ######## SubperdB=[] for sub in sorted([infile for", "Reading in the input images ######## SubperdB=[] for sub in sorted([infile for infile", "elif sub==subjects-1: for i in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for i in range(subjects):", "== \"SMIC\": 
inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value)", "range(len(order)): for n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write each CT of each", "for infile in os.listdir(inputDir)]): VidperSub=[] for vid in sorted([inrfile for inrfile in os.listdir(inputDir+sub)]):", "'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB==", "subjects=16; n_exp=3; r= 170;w=140; VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT in the", "# Train_X = np.expand_dims(Train_X, axis=2) # Train_X = np.reshape(Train_X, Train_X.shape + (1, 1,)", "'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub ' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row", "inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in", "[_,_,dim]=img.shape if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1: #in resize function, [col,row] img=cv2.resize(img,(col,row))", "if resizedFlag ==1: col=w row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read the label for", "colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x in colm] 
table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str))", "+'_label.txt') labelperSub=[] counter = 0 for sub in range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter =", "2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for", "each CT of each CV into .txt file if not os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/')", "from the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save into a .txt file with", "collections import Counter from sklearn.metrics import confusion_matrix import scipy.io as sio from keras.models", "in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1: #in resize", "CT of each CV into .txt file if not os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with", "+ (1, 1,) ) # Train_X = np.reshape( Train_X, Train_X.shape ) # Train_X", "in the selection.\") ######### Reading in the input images ######## SubperdB=[] for sub", "sklearn.metrics import confusion_matrix import scipy.io as sio from keras.models import Sequential from keras.layers", "mat=np.zeros((n_exp,n_exp)) #put the order accordingly, in order to form the overall ConfusionMat for", "i in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for i in range(subjects): if sub ==", "from evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB == \"CASME2_raw\":", "resizedFlag=1; r=68; w=56 VidPerSubject = 
[9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s in", "data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the label based on", "######## tot_mat=np.zeros((n_exp,n_exp)) for sub in range(subjects): Train_X=[] Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y)", ") # Train_X = np.reshape(2500, 16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print (np.shape(Train_X))", "n_exp=5 resizedFlag=1; r=68; w=56 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s", "FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting up the LSTM model ######## data_dim=r*w", "infile in os.listdir(inputDir)]): VidperSub=[] for vid in sorted([inrfile for inrfile in os.listdir(inputDir+sub)]): path=inputDir", "print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print (np.shape(Train_X)) print (np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X, Train_Y,", "write each CT of each CV into 
.txt file if not os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+", "order to form the overall ConfusionMat for m in range(len(order)): for n in", "the LSTM model ######## data_dim=r*w # 2500 print(data_dim) timesteps=10 # LSTM1 = LSTM(2500,", "with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub ' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab)", "thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1: # compute the accuracy, F1, P and R", "return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential() # model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False))", "vstack # print(Train_X.shape) # Train_X = Train_X.shape[1:] # print(Train_X.shape) # Train_X = np.expand_dims(Train_X,", "LSTM1 = LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential() # model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True,", "wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x in", "np import sys import math import operator import csv import glob,os import xlrd", "s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB == \"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value)", "P and R 
from the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save into a", "import scipy.io as sio from keras.models import Sequential from keras.layers import LSTM, Dense,", "label based on subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[] counter = 0 for", "col=w row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read the label for each input video", "#create an array to hold the CT for each CV mat=np.zeros((n_exp,n_exp)) #put the", "1,) ) # Train_X = np.reshape( Train_X, Train_X.shape ) # Train_X = np.reshape(2500,", "r=50; w=50 resizedFlag=1; subjects=26 samples=246 n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[]", "resizedFlag ==1: #in resize function, [col,row] img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid)", "model.summary() predict=model.predict_classes(Test_X) ## predict[predict>= 0.5] = 1 ## predict[predict<0.5] = 0 print (predict)", "= np.reshape(Train_X, Train_X.shape + (1, 1,) ) # Train_X = np.reshape( Train_X, Train_X.shape", "os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub '", "Train_X=[] Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y)) if sub==0: for i", "batch_size=20) model.summary() predict=model.predict_classes(Test_X) ## predict[predict>= 0.5] = 1 
## predict[predict<0.5] = 0 print", "samples are excluded subjects=16; n_exp=3; r= 170;w=140; VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else:", "Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for i in range(subjects): if sub == i: continue else:", "subjects=26 samples=246 n_exp=5 resizedFlag=1; r=68; w=56 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[]", "IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s])", "elif dB == \"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm]", "data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate", "######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[] counter = 0 for sub in range(subjects): numVid=VidPerSubject[sub]", "dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' 
resizedFlag=1; elif dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0)", "0 for sub in range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter = counter + numVid ##print(np.shape(labelperSub[1]))", "for m in range(len(order)): for n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write each", "import numpy as np import sys import math import operator import csv import", "label for each input video collectinglabel(table, sub[3:], vid, workplace+'Classification/', dB) for var in", "reordering import readinput from evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if", "## print(np.shape(Test_Y)) if sub==0: for i in range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif sub==subjects-1: for", "expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246 n_exp=5 resizedFlag=1; r=68; w=56 VidPerSubject", "# print(path) imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag ==1: col=w row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ##", "subjects=26 samples=246 n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s in", "evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/'", "array to hold the 
CT for each CV mat=np.zeros((n_exp,n_exp)) #put the order accordingly,", "' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1:", "CV mat=np.zeros((n_exp,n_exp)) #put the order accordingly, in order to form the overall ConfusionMat", "colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246 n_exp=5 resizedFlag=1; r=68; w=56", "selection.\") ######### Reading in the input images ######## SubperdB=[] for sub in sorted([infile", "CV into .txt file if not os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as", "for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50; w=50 resizedFlag=1; subjects=26 samples=246 n_exp=5", "os.listdir(inputDir+sub)]): path=inputDir + sub + '/'+ vid + '/' if path in listOfIgnoredSamples:", "inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x", "print (np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X) ## predict[predict>=", "keras import metrics from keras import backend as K from labelling import collectinglabel", "FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting up the LSTM model ######## data_dim=r*w # 
2500", "up the LSTM model ######## data_dim=r*w # 2500 print(data_dim) timesteps=10 # LSTM1 =", "in ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1: # compute the accuracy, F1, P", "## predict[predict<0.5] = 0 print (predict) print (Test_Y) #compute the ConfusionMat ct=confusion_matrix(Test_Y,predict) #check", "img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting up the LSTM", "= np.expand_dims(Train_X, axis=2) # Train_X = np.reshape(Train_X, Train_X.shape + (1, 1,) ) #", "the order accordingly, in order to form the overall ConfusionMat for m in", "listOfIgnoredSamples: continue # print(dB) # print(path) imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag ==1: col=w row=r", "thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:' + str(microAcc)) thewriter.writerow('F1:' + str(f1)) thewriter.writerow('Precision:' + str(p))", "LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential() # model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)))", "') thewriter.writerow('Sub ' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n')", "for sub in range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter = counter + numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1])", "from vstack # print(Train_X.shape) # Train_X = Train_X.shape[1:] # print(Train_X.shape) # Train_X =", "model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) 
model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the label based on subjects", "\"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x", "for row in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:' + str(microAcc)) thewriter.writerow('F1:' +", "+ '/'+ vid + '/' if path in listOfIgnoredSamples: continue # print(dB) #", "the order of the CT order=np.unique(np.concatenate((predict,Test_Y))) #create an array to hold the CT", "sub==subjects-1: # compute the accuracy, F1, P and R from the overall CT", "pd from sklearn.svm import SVC from collections import Counter from sklearn.metrics import confusion_matrix", "Train_X, Train_X.shape ) # Train_X = np.reshape(2500, 16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y))", "resize function, [col,row] img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting", "VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT in the selection.\") ######### Reading in", "vid + '/' if path in listOfIgnoredSamples: continue # print(dB) # print(path) imgList=readinput(path,dB)", "'/' if path in listOfIgnoredSamples: continue # print(dB) # print(path) imgList=readinput(path,dB) numFrame=len(imgList) if", "vid, workplace+'Classification/', dB) for var in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)", "print 
(np.shape(Train_X)) print (np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X)", "2500 print(data_dim) timesteps=10 # LSTM1 = LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential() # model.add(TimeDistributed(Dense(data_dim),", "import pandas as pd from sklearn.svm import SVC from collections import Counter from", "for i in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for i in range(subjects): if sub", "ct=confusion_matrix(Test_Y,predict) #check the order of the CT order=np.unique(np.concatenate((predict,Test_Y))) #create an array to hold", "and R from the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save into a .txt", "in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in", "in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples are", "from keras.layers import LSTM, Dense, TimeDistributed from keras.utils import np_utils from keras import", "import backend as K from labelling import collectinglabel from reordering import readinput from", "ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm]", "for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for", "170;w=140; VidPerSubject = 
[6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT in the selection.\") ######### Reading", "for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for", "path=inputDir + sub + '/'+ vid + '/' if path in listOfIgnoredSamples: continue", "the CT for each CV mat=np.zeros((n_exp,n_exp)) #put the order accordingly, in order to", "IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s])", "vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164;", "wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in", "print(path) imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag ==1: col=w row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read", "img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1: #in resize function, [col,row] img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten() else:", "on subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[] counter = 0 for sub in", "'/'+ vid + '/' if path in listOfIgnoredSamples: continue # print(dB) # print(path)", "# write each CT of each CV into .txt file if not 
os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'):", "expression=[int(x.value) for x in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples are excluded subjects=16; n_exp=3;", "sorted([inrfile for inrfile in os.listdir(inputDir+sub)]): path=inputDir + sub + '/'+ vid + '/'", "##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the label based on subjects ######### label=np.loadtxt(workplace+'Classification/'+", "rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME", "s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB == \"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\"", "import cv2 import pandas as pd from sklearn.svm import SVC from collections import", "in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246 n_exp=5 resizedFlag=1;", "based on subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[] counter = 0 for sub", "print(Test_Y) ## print(np.shape(Test_Y)) if sub==0: for i in range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif sub==subjects-1:", "import math import operator import csv import glob,os import xlrd import cv2 import", "os.listdir(inputDir)]): VidperSub=[] for vid in sorted([inrfile for inrfile in os.listdir(inputDir+sub)]): path=inputDir + sub", "if not 
os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile, delimiter=' ')", "==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1: #in resize function, [col,row] img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten()", "\"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for", "= 1 ## predict[predict<0.5] = 0 print (predict) print (Test_Y) #compute the ConfusionMat", "import Counter from sklearn.metrics import confusion_matrix import scipy.io as sio from keras.models import", "(np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05, epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X) ## predict[predict>= 0.5]", "counter = counter + numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ######## Seperating the input files into", "Train_Y.append(labelperSub[i]) elif sub==subjects-1: for i in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for i in", "labelperSub.append(label[counter:counter+numVid]) counter = counter + numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ######## Seperating the input files", "R from the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save into a .txt file", "Train_X=np.vstack(Train_X) # changed to hstack from vstack # print(Train_X.shape) # Train_X = Train_X.shape[1:]", "in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50; w=50 resizedFlag=1; 
subjects=26 samples=246 n_exp=5 VidPerSubject =", "vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26", "Train_X.shape + (1, 1,) ) # Train_X = np.reshape( Train_X, Train_X.shape ) #", "# Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X) # changed to hstack from vstack # print(Train_X.shape)", "as np import sys import math import operator import csv import glob,os import", "var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting up the LSTM model ########", "vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) #", "listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\":", "ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1: # compute the accuracy, F1, P and", "thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:' + str(microAcc)) thewriter.writerow('F1:' + str(f1)) thewriter.writerow('Precision:' + str(p)) thewriter.writerow('Recall:'", "# print(Train_X) # Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X) # changed to hstack from vstack", "keras import backend as K from labelling import collectinglabel from reordering import readinput", "Train_X = np.reshape( Train_X, Train_X.shape ) # Train_X = np.reshape(2500, 16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y)", "## ##read the label for each input video collectinglabel(table, sub[3:], vid, 
workplace+'Classification/', dB)", "numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ######## Seperating the input files into LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp))", "x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples", "##print(labelperSub[1]) ######## Seperating the input files into LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp)) for sub", "Train_X.shape[1:] # print(Train_X.shape) # Train_X = np.expand_dims(Train_X, axis=2) # Train_X = np.reshape(Train_X, Train_X.shape", "each CV into .txt file if not os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a')", "'Result/'+dB+'/final_CT.txt','w') as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:'", "dB +'_label.txt') labelperSub=[] counter = 0 for sub in range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter", "ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x in colm]", "accordingly, in order to form the overall ConfusionMat for m in range(len(order)): for", "counter + numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ######## Seperating the input files into LOSO CV", "for var in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1:", "else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace 
with croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0)", "print(dB) # print(path) imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag ==1: col=w row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape", "table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50; w=50 resizedFlag=1; subjects=26 samples=246 n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/',", "images ######## SubperdB=[] for sub in sorted([infile for infile in os.listdir(inputDir)]): VidperSub=[] for", "return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the label", "# model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid'))", "######## data_dim=r*w # 2500 print(data_dim) timesteps=10 # LSTM1 = LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))", ".txt file with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in tot_mat: thewriter.writerow(row)", "cv2 import pandas as pd from sklearn.svm import SVC from collections import Counter", "Train_X.append(SubperdB[i]) 
Train_Y.append(labelperSub[i]) # print(Train_X) # Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X) # changed to hstack", "1 ## predict[predict<0.5] = 0 print (predict) print (Test_Y) #compute the ConfusionMat ct=confusion_matrix(Test_Y,predict)", "confusion_matrix import scipy.io as sio from keras.models import Sequential from keras.layers import LSTM,", "else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) # print(Train_X) # Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X) # changed to", "subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[] counter = 0 for sub in range(subjects):", "import glob,os import xlrd import cv2 import pandas as pd from sklearn.svm import", "##read the label for each input video collectinglabel(table, sub[3:], vid, workplace+'Classification/', dB) for", "n_exp=3; r= 170;w=140; VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT in the selection.\")", "i in range(subjects): if sub == i: continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) # print(Train_X)", "import operator import csv import glob,os import xlrd import cv2 import pandas as", "'/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME", "in sorted([inrfile for inrfile in os.listdir(inputDir+sub)]): path=inputDir + sub + '/'+ vid +", "generate the label based on subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[] counter =", "in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples are excluded subjects=16; n_exp=3; r= 170;w=140; VidPerSubject", "the ConfusionMat 
ct=confusion_matrix(Test_Y,predict) #check the order of the CT order=np.unique(np.concatenate((predict,Test_Y))) #create an array", "np.reshape(Train_X, Train_X.shape + (1, 1,) ) # Train_X = np.reshape( Train_X, Train_X.shape )", "to hstack from vstack # print(Train_X.shape) # Train_X = Train_X.shape[1:] # print(Train_X.shape) #", "x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50;", "F1, P and R from the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save into", "listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None)", "for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246 n_exp=5 resizedFlag=1; r=68; w=56 VidPerSubject =", "LSTM, Dense, TimeDistributed from keras.utils import np_utils from keras import metrics from keras", "# print(Train_X.shape) Train_X=np.vstack(Train_X) # changed to hstack from vstack # print(Train_X.shape) # Train_X", "each CV mat=np.zeros((n_exp,n_exp)) #put the order accordingly, in order to form the overall", "csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:' + str(microAcc))", "validation_split=0.05, epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X) ## predict[predict>= 0.5] = 1 ## predict[predict<0.5] =", "\"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif dB== \"CASME2_large\": 
inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value)", "testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x", "range(subjects): Train_X=[] Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y)) if sub==0: for", "keras.models import Sequential from keras.layers import LSTM, Dense, TimeDistributed from keras.utils import np_utils", "continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) # print(Train_X) # Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X) # changed", "# Train_X = np.reshape(2500, 16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print (np.shape(Train_X)) print", "[9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else:", "print(\"NOT in the selection.\") ######### Reading in the input images ######## SubperdB=[] for", "input images ######## SubperdB=[] for sub in sorted([infile for infile in os.listdir(inputDir)]): VidperSub=[]", "pandas as pd from sklearn.svm import SVC from collections import Counter from sklearn.metrics", "axis=2) # Train_X = np.reshape(Train_X, Train_X.shape + (1, 1,) ) # Train_X =", 
"VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting up the LSTM model ######## data_dim=r*w # 2500 print(data_dim)", "#replace with croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm]", "input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) ####", "in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB == \"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx');", "range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB == \"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0)", "readinput from evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB ==", "video collectinglabel(table, sub[3:], vid, workplace+'Classification/', dB) for var in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if", "==1: #in resize function, [col,row] img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub)", "Train_X = np.reshape(2500, 16077) 
print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print (np.shape(Train_X)) print (np.shape(Test_Y))", "print (Test_Y) #compute the ConfusionMat ct=confusion_matrix(Test_Y,predict) #check the order of the CT order=np.unique(np.concatenate((predict,Test_Y)))", "the overall ConfusionMat for m in range(len(order)): for n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat", "sub in range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter = counter + numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ########", "= counter + numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ######## Seperating the input files into LOSO", "open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub ' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for", "print(Train_X) # Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X) # changed to hstack from vstack #", "######## SubperdB=[] for sub in sorted([infile for infile in os.listdir(inputDir)]): VidperSub=[] for vid", "else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting up the LSTM model ######## data_dim=r*w #", "SubperdB=[] for sub in sorted([infile for infile in os.listdir(inputDir)]): VidperSub=[] for vid in", "# print(Train_X.shape) # Train_X = Train_X.shape[1:] # print(Train_X.shape) # Train_X = np.expand_dims(Train_X, axis=2)", "#6 samples are excluded subjects=16; n_exp=3; r= 170;w=140; VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1;", "range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter = counter + numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) 
######## Seperating the", "r= 170;w=140; VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT in the selection.\") #########", "with croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None)", "VidperSub=[] for vid in sorted([inrfile for inrfile in os.listdir(inputDir+sub)]): path=inputDir + sub +", "labelling import collectinglabel from reordering import readinput from evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\"", "for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246", "input_shape=(timesteps, data_dim)) model=Sequential() # model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True))", "else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read the label for each input video collectinglabel(table, sub[3:],", "collectinglabel from reordering import readinput from evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath =", "for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB == \"SMIC\":", "Train_X = np.reshape(Train_X, Train_X.shape + (1, 1,) ) # Train_X = np.reshape( Train_X,", "TimeDistributed from keras.utils import np_utils from keras import metrics from keras import backend", "Test_X=np.array(Test_X) Test_Y=labelperSub[sub] 
Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y)) if sub==0: for i in range(1,subjects): Train_X.append(SubperdB[i])", "for row in ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1: # compute the accuracy,", "colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50; w=50 resizedFlag=1; subjects=26", "import LSTM, Dense, TimeDistributed from keras.utils import np_utils from keras import metrics from", "range(subjects): if sub == i: continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) # print(Train_X) # Train_X=np.hstack(Train_X)", "print (predict) print (Test_Y) #compute the ConfusionMat ct=confusion_matrix(Test_Y,predict) #check the order of the", "row in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:' + str(microAcc)) thewriter.writerow('F1:' + str(f1))", "vid in sorted([inrfile for inrfile in os.listdir(inputDir+sub)]): path=inputDir + sub + '/'+ vid", "x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246 n_exp=5 resizedFlag=1; r=68; w=56 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16]", "s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace", "for sub in range(subjects): Train_X=[] Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y))", "in the input images ######## SubperdB=[] for sub in sorted([infile for infile in", "import np_utils from keras 
import metrics from keras import backend as K from", "import confusion_matrix import scipy.io as sio from keras.models import Sequential from keras.layers import", "inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for", "label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[] counter = 0 for sub in range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid])", "##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ######## Seperating the input files into LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp)) for", "n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write each CT of each CV into", "# print(Train_X.shape) # Train_X = np.expand_dims(Train_X, axis=2) # Train_X = np.reshape(Train_X, Train_X.shape +", "= [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT in the selection.\") ######### Reading in the", "metrics from keras import backend as K from labelling import collectinglabel from reordering", "colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples are excluded subjects=16;", "if resizedFlag ==1: #in resize function, [col,row] img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten()))", "each input video collectinglabel(table, sub[3:], vid, workplace+'Classification/', dB) for var in range(numFrame): img=cv2.imread(imgList[var])", "Train_X = Train_X.shape[1:] # print(Train_X.shape) # Train_X = np.expand_dims(Train_X, axis=2) # Train_X =", "range(numFrame): 
img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1: #in resize function,", "the label based on subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[] counter = 0", "') thewriter.writerow('micro:' + str(microAcc)) thewriter.writerow('F1:' + str(f1)) thewriter.writerow('Precision:' + str(p)) thewriter.writerow('Recall:' + str(r))", "delimiter=' ') thewriter.writerow('micro:' + str(microAcc)) thewriter.writerow('F1:' + str(f1)) thewriter.writerow('Precision:' + str(p)) thewriter.writerow('Recall:' +", "row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read the label for each input video collectinglabel(table,", "from collections import Counter from sklearn.metrics import confusion_matrix import scipy.io as sio from", "\"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None)", "2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in", "in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for i in range(subjects): if sub == i:", "model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy])", 
"0.5] = 1 ## predict[predict<0.5] = 0 print (predict) print (Test_Y) #compute the", "in range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif sub==subjects-1: for i in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else:", "for x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None) expression=[int(x.value) for x in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6", "if sub==0: for i in range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif sub==subjects-1: for i in", "sub in range(subjects): Train_X=[] Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y)) if", "Dense, TimeDistributed from keras.utils import np_utils from keras import metrics from keras import", "the accuracy, F1, P and R from the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) #", "colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50; w=50 resizedFlag=1; subjects=26 samples=246 n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16]", "dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm]", "samples=164; #6 samples are excluded subjects=16; n_exp=3; r= 170;w=140; VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[];", "from sklearn.svm import SVC from collections import Counter from sklearn.metrics import confusion_matrix import", "file if not os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with 
open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile, delimiter='", "model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the label based on subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt')", "thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1: # compute the", "scipy.io as sio from keras.models import Sequential from keras.layers import LSTM, Dense, TimeDistributed", "= '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/'", "r=68; w=56 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)):", "in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with", "tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:' + str(microAcc)) thewriter.writerow('F1:' + str(f1)) thewriter.writerow('Precision:' +", "# changed to hstack from vstack # print(Train_X.shape) # Train_X = Train_X.shape[1:] #", "order of the CT order=np.unique(np.concatenate((predict,Test_Y))) #create an array to hold the CT for", "in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 
samples=246 n_exp=5 resizedFlag=1; r=68; w=56 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01',", "+ '/' if path in listOfIgnoredSamples: continue # print(dB) # print(path) imgList=readinput(path,dB) numFrame=len(imgList)", "thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:' + str(microAcc)) thewriter.writerow('F1:'", "print(Train_X.shape) Train_X=np.vstack(Train_X) # changed to hstack from vstack # print(Train_X.shape) # Train_X =", "ConfusionMat for m in range(len(order)): for n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write", "form the overall ConfusionMat for m in range(len(order)): for n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n]", "i: continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) # print(Train_X) # Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X) #", "'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB", "Counter from sklearn.metrics import confusion_matrix import scipy.io as sio from keras.models import Sequential", "operator import csv import glob,os import xlrd import cv2 import pandas as pd", "n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if", "files into LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp)) for sub in 
range(subjects): Train_X=[] Train_Y=[] Test_X=SubperdB[sub]", "of the CT order=np.unique(np.concatenate((predict,Test_Y))) #create an array to hold the CT for each", "into a .txt file with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in", "+ str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1: #", "thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub ' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in ct: thewriter.writerow(row)", "x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50; w=50 resizedFlag=1; subjects=26 samples=246 n_exp=5 VidPerSubject", "import Sequential from keras.layers import LSTM, Dense, TimeDistributed from keras.utils import np_utils from", "= Train_X.shape[1:] # print(Train_X.shape) # Train_X = np.expand_dims(Train_X, axis=2) # Train_X = np.reshape(Train_X,", "as pd from sklearn.svm import SVC from collections import Counter from sklearn.metrics import", "in os.listdir(inputDir)]): VidperSub=[] for vid in sorted([inrfile for inrfile in os.listdir(inputDir+sub)]): path=inputDir +", "sio from keras.models import Sequential from keras.layers import LSTM, Dense, TimeDistributed from keras.utils", "in order to form the overall ConfusionMat for m in range(len(order)): for n", "inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value)", "print(data_dim) timesteps=10 # LSTM1 = LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential() # 
model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps,", "in range(subjects): if sub == i: continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) # print(Train_X) #", "hstack from vstack # print(Train_X.shape) # Train_X = Train_X.shape[1:] # print(Train_X.shape) # Train_X", "# save into a .txt file with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for", "iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value)", "= [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]]", "in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write each CT of each CV into .txt", "16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print (np.shape(Train_X)) print (np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X,", "Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X) # changed to hstack from vstack # print(Train_X.shape) #", "if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB == \"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None)", "open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile: 
thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ')", "the input images ######## SubperdB=[] for sub in sorted([infile for infile in os.listdir(inputDir)]):", "workplace+'Classification/', dB) for var in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if", "(predict) print (Test_Y) #compute the ConfusionMat ct=confusion_matrix(Test_Y,predict) #check the order of the CT", "listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT in the selection.\") ######### Reading in the input images", "np_utils from keras import metrics from keras import backend as K from labelling", "function, [col,row] img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting up", "the label for each input video collectinglabel(table, sub[3:], vid, workplace+'Classification/', dB) for var", "# print(type(table)) r=50; w=50 resizedFlag=1; subjects=26 samples=246 n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/',", "thewriter.writerow('Sub ' + str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if", "<filename>archive/original_main.py import numpy as np import sys import math import operator import csv", "==1: col=w row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read the label for each input", "sys import math import operator import csv import glob,os import xlrd import cv2", "if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1: #in resize function, [col,row] 
img=cv2.resize(img,(col,row)) if", "+ numVid ##print(np.shape(labelperSub[1])) ##print(labelperSub[1]) ######## Seperating the input files into LOSO CV ########", "for x in colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples are excluded subjects=16; n_exp=3; r=", "Sequential from keras.layers import LSTM, Dense, TimeDistributed from keras.utils import np_utils from keras", "Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif sub==subjects-1: for i in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for i", "Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y)) if sub==0: for i in range(1,subjects):", "resizedFlag=1; else: print(\"NOT in the selection.\") ######### Reading in the input images ########", "continue # print(dB) # print(path) imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag ==1: col=w row=r else:", "= 0 for sub in range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter = counter + numVid", "LSTM model ######## data_dim=r*w # 2500 print(data_dim) timesteps=10 # LSTM1 = LSTM(2500, return_sequences=True,", "# LSTM1 = LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential() # model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500,", "timesteps=10 # LSTM1 = LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential() # model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim)))", "Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y)) if sub==0: for i in range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) elif", "Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print 
(np.shape(Train_X)) print (np.shape(Test_Y)) print (np.shape(Test_X)) model.fit(Train_X, Train_Y, validation_split=0.05,", "Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y)) if sub==0: for i in", "overall ConfusionMat for m in range(len(order)): for n in range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat #", "for each CV mat=np.zeros((n_exp,n_exp)) #put the order accordingly, in order to form the", "model ######## data_dim=r*w # 2500 print(data_dim) timesteps=10 # LSTM1 = LSTM(2500, return_sequences=True, input_shape=(timesteps,", "(1, 1,) ) # Train_X = np.reshape( Train_X, Train_X.shape ) # Train_X =", "str(sub+1)) thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1: # compute", "in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:' + str(microAcc)) thewriter.writerow('F1:' + str(f1)) thewriter.writerow('Precision:'", "path in listOfIgnoredSamples: continue # print(dB) # print(path) imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag ==1:", "tot_mat=np.zeros((n_exp,n_exp)) for sub in range(subjects): Train_X=[] Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ##", "CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save into a .txt file with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as", "print(Train_X.shape) # Train_X = np.expand_dims(Train_X, axis=2) # Train_X = np.reshape(Train_X, Train_X.shape + (1,", "elif dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) 
colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in", "Train_Y, validation_split=0.05, epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X) ## predict[predict>= 0.5] = 1 ## predict[predict<0.5]", "thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1: # compute the accuracy, F1, P and R from", "to form the overall ConfusionMat for m in range(len(order)): for n in range(len(order)):", "if dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls');", "Train_X.shape ) # Train_X = np.reshape(2500, 16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print", "are excluded subjects=16; n_exp=3; r= 170;w=140; VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22]; listOfIgnoredSamples=[]; resizedFlag=1; else: print(\"NOT", "table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples are excluded subjects=16; n_exp=3; r= 170;w=140; VidPerSubject = [6,6,39,19,2,4,13,4,7,9,10,10,4,7,2,22];", "thewriter.writerow('\\n') if sub==subjects-1: # compute the accuracy, F1, P and R from the", "counter = 0 for sub in range(subjects): numVid=VidPerSubject[sub] labelperSub.append(label[counter:counter+numVid]) counter = counter +", "the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat) [f1,p,r]=fpr(tot_mat,n_exp) # save into a .txt file with open(workplace+'Classification/'+", "predict[predict<0.5] = 0 print (predict) print (Test_Y) #compute the ConfusionMat ct=confusion_matrix(Test_Y,predict) #check the", "resizedFlag=1; subjects=26 samples=246 n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 
'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s", "resizedFlag ==1: col=w row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read the label for each", "= 0 print (predict) print (Test_Y) #compute the ConfusionMat ct=confusion_matrix(Test_Y,predict) #check the order", "samples=246 n_exp=5 resizedFlag=1; r=68; w=56 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02','sub09/EP02_02f','sub10/EP13_01','sub17/EP15_01', 'sub17/EP15_03','sub19/EP19_04','sub24/EP10_03','sub24/EP07_01', 'sub24/EP07_04f','sub24/EP02_07','sub26/EP15_01'] listOfIgnoredSamples=[] for", "##### Setting up the LSTM model ######## data_dim=r*w # 2500 print(data_dim) timesteps=10 #", "as K from labelling import collectinglabel from reordering import readinput from evaluationmatrix import", "== i: continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) # print(Train_X) # Train_X=np.hstack(Train_X) # print(Train_X.shape) Train_X=np.vstack(Train_X)", "range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with croppoed", "numFrame=len(imgList) if resizedFlag ==1: col=w row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read the label", "dB) for var in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag", "order accordingly, in order to form the overall ConfusionMat for m in range(len(order)):", "elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) 
colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value)", "colm] table=np.transpose(np.array([np.array(vidName),np.array(expression)],dtype=str)) samples=164; #6 samples are excluded subjects=16; n_exp=3; r= 170;w=140; VidPerSubject =", "= np.reshape(2500, 16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print (np.shape(Train_X)) print (np.shape(Test_Y)) print", "dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for", "of each CV into .txt file if not os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with open(workplace+'Classification/'+", "import xlrd import cv2 import pandas as pd from sklearn.svm import SVC from", "as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('micro:' +", "colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table)) r=50; w=50 resizedFlag=1;", "into LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp)) for sub in range(subjects): Train_X=[] Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X)", ".txt file if not os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile,", "import metrics from keras import backend as K from labelling import collectinglabel from", "Train_X = np.expand_dims(Train_X, axis=2) # Train_X = 
np.reshape(Train_X, Train_X.shape + (1, 1,) )", "print(type(table)) r=50; w=50 resizedFlag=1; subjects=26 samples=246 n_exp=5 VidPerSubject = [9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/']", "# compute the accuracy, F1, P and R from the overall CT microAcc=np.trace(tot_mat)/np.sum(tot_mat)", "[col,row] img=cv2.resize(img,(col,row)) if var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting up the", "x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246 n_exp=5", "dim ==3: img=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) if resizedFlag ==1: #in resize function, [col,row] img=cv2.resize(img,(col,row)) if var==0:", "Train_Y.append(labelperSub[i]) else: for i in range(subjects): if sub == i: continue else: Train_X.append(SubperdB[i])", "for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) # print(type(table))", "from reordering import readinput from evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/'", "LOSO CV ######## tot_mat=np.zeros((n_exp,n_exp)) for sub in range(subjects): Train_X=[] Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub]", "# Train_X = np.reshape(Train_X, Train_X.shape + (1, 1,) ) # Train_X = np.reshape(", "labelperSub=[] counter = 0 for sub in range(subjects): numVid=VidPerSubject[sub] 
labelperSub.append(label[counter:counter+numVid]) counter = counter", "collectinglabel(table, sub[3:], vid, workplace+'Classification/', dB) for var in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim", "#compute the ConfusionMat ct=confusion_matrix(Test_Y,predict) #check the order of the CT order=np.unique(np.concatenate((predict,Test_Y))) #create an", "row in ct: thewriter.writerow(row) thewriter.writerow(order) thewriter.writerow('\\n') if sub==subjects-1: # compute the accuracy, F1,", "else: print(\"NOT in the selection.\") ######### Reading in the input images ######## SubperdB=[]", "import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB == \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1;", "wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x", "if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with croppoed for", "range(len(order)): mat[int(order[m]),int(order[n])]=ct[m,n] tot_mat=mat+tot_mat # write each CT of each CV into .txt file", "np.reshape(2500, 16077) print(Train_X.shape) Train_Y=np.hstack(Train_Y) Train_Y=np_utils.to_categorical(Train_Y,5) print (np.shape(Train_Y)) print (np.shape(Train_X)) print (np.shape(Test_Y)) print (np.shape(Test_X))", "epochs=1, batch_size=20) model.summary() predict=model.predict_classes(Test_X) ## predict[predict>= 0.5] = 1 ## predict[predict<0.5] = 0", "inrfile in os.listdir(inputDir+sub)]): path=inputDir + sub + '/'+ vid + '/' if path", "for i in range(1,subjects): Train_X.append(SubperdB[i]) 
Train_Y.append(labelperSub[i]) elif sub==subjects-1: for i in range(subjects-1): Train_X.append(SubperdB[i])", "math import operator import csv import glob,os import xlrd import cv2 import pandas", "# 2500 print(data_dim) timesteps=10 # LSTM1 = LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential() #", "== \"CASME2_raw\": inputDir='/media/ice/OS/Datasets/CASME2-RAW/' resizedFlag=1; elif dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None)", "croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x in colm] colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value)", "img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read the label for each input video collectinglabel(table, sub[3:], vid,", "[9,13,7,5,19,5,9,3,13,13,10,12,8,4,3,4,34,3,15,11,2,2,12,7,7,16] IgnoredSamples=['sub09/EP13_02/','sub09/EP02_02f/','sub10/EP13_01/','sub17/EP15_01/', 'sub17/EP15_03/','sub19/EP19_04/','sub24/EP10_03/','sub24/EP07_01/', 'sub24/EP07_04f/','sub24/EP02_07/','sub26/EP15_01/'] listOfIgnoredSamples=[] for s in range(len(IgnoredSamples)): if s==0: listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else:", "from labelling import collectinglabel from reordering import readinput from evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/'", "import collectinglabel from reordering import readinput from evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath", "resizedFlag=1; elif dB== \"CASME2_large\": inputDir='/media/ice/OS/Datasets/CASME 2/' wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME 2/CASME2_label_Ver_2.xls'); ws=wb.sheet_by_index(0) 
colm=ws.col_slice(colx=0,start_rowx=1,end_rowx=None) iD=[str(x.value) for x", "hold the CT for each CV mat=np.zeros((n_exp,n_exp)) #put the order accordingly, in order", "os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile: thewriter=csv.writer(csvfile, delimiter=' ') thewriter.writerow('Sub ' +", "sub[3:], vid, workplace+'Classification/', dB) for var in range(numFrame): img=cv2.imread(imgList[var]) [_,_,dim]=img.shape if dim ==3:", "#### generate the label based on subjects ######### label=np.loadtxt(workplace+'Classification/'+ dB +'_label.txt') labelperSub=[] counter", "# print(dB) # print(path) imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag ==1: col=w row=r else: img=cv2.imread(imgList[0])", "dB == \"SMIC\": inputDir=\"/srv/oyh/DataBase/SMIC/HS_naming_modified/\" wb=xlrd.open_workbook('/srv/oyh/DataBase/SMIC_label.xlsx'); ws=wb.sheet_by_index(0) colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=2,start_rowx=1,end_rowx=None)", "[f1,p,r]=fpr(tot_mat,n_exp) # save into a .txt file with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab)", "= LSTM(2500, return_sequences=True, input_shape=(timesteps, data_dim)) model=Sequential() # model.add(TimeDistributed(Dense(data_dim), input_shape=(timesteps, data_dim))) model.add(LSTM(2500, return_sequences=True, input_shape=(timesteps,", "in range(subjects): Train_X=[] Train_Y=[] Test_X=SubperdB[sub] Test_X=np.array(Test_X) Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y)) if sub==0:", "with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row in tot_mat: thewriter.writerow(row) thewriter=csv.writer(csvfile, delimiter='", "into .txt file if 
not os.path.exists(workplace+'Classification/'+'Result/'+dB+'/'): os.mkdir(workplace+'Classification/'+ 'Result/'+dB+'/') with open(workplace+'Classification/'+ 'Result/'+dB+'/sub_CT.txt','a') as csvfile:", "numpy as np import sys import math import operator import csv import glob,os", "listOfIgnoredSamples=[inputDir+IgnoredSamples[s]] else: listOfIgnoredSamples.append(inputDir+IgnoredSamples[s]) elif dB== \"CASME2_TIM\": inputDir='/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' #replace with croppoed for testing wb=xlrd.open_workbook('/media/ice/OS/Datasets/CASME2_label_Ver_2.xls');", "for inrfile in os.listdir(inputDir+sub)]): path=inputDir + sub + '/'+ vid + '/' if", "##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the label based on subjects #########", "imgList=readinput(path,dB) numFrame=len(imgList) if resizedFlag ==1: col=w row=r else: img=cv2.imread(imgList[0]) [row,col,_l]=img.shape ## ##read the", "ConfusionMat ct=confusion_matrix(Test_Y,predict) #check the order of the CT order=np.unique(np.concatenate((predict,Test_Y))) #create an array to", "input_shape=(timesteps, data_dim))) model.add(LSTM(500,return_sequences=False)) ##model.add(LSTM(500,return_sequences=True)) ##model.add(LSTM(50,return_sequences=False)) model.add(Dense(50,activation='sigmoid')) model.add(Dense(5,activation='sigmoid')) model.compile(loss='categorical_crossentropy',optimizer='Adam',metrics=[metrics.categorical_accuracy]) #### generate the label based", "import readinput from evaluationmatrix import fpr workplace='/media/ice/OS/Datasets/CASME2_TIM/' dB=\"CASME2_TIM\" rootpath = '/media/ice/OS/Datasets/CASME2_TIM/CASME2_TIM/' if dB", "sorted([infile for infile in os.listdir(inputDir)]): VidperSub=[] for vid in sorted([inrfile for inrfile in", 
"Test_Y=labelperSub[sub] Test_Yy=np_utils.to_categorical(Test_Y,5) print(Test_Y) ## print(np.shape(Test_Y)) if sub==0: for i in range(1,subjects): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i])", "colm=ws.col_slice(colx=1,start_rowx=1,end_rowx=None) vidName=[str(x.value) for x in colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str))", "an array to hold the CT for each CV mat=np.zeros((n_exp,n_exp)) #put the order", "csv import glob,os import xlrd import cv2 import pandas as pd from sklearn.svm", "if var==0: FrameperVid=img.flatten() else: FrameperVid=np.vstack((FrameperVid,img.flatten())) VidperSub.append(FrameperVid) SubperdB.append(VidperSub) ##### Setting up the LSTM model", "colm] colm=ws.col_slice(colx=6,start_rowx=1,end_rowx=None) expression=[str(x.value) for x in colm] table=np.transpose(np.array([np.array(iD),np.array(vidName),np.array(expression)],dtype=str)) subjects=26 samples=246 n_exp=5 resizedFlag=1; r=68;", "save into a .txt file with open(workplace+'Classification/'+ 'Result/'+dB+'/final_CT.txt','w') as csvfile: thewriter=csv.writer(csvfile,dialect=csv.excel_tab) for row", "sub==subjects-1: for i in range(subjects-1): Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) else: for i in range(subjects): if", "np.expand_dims(Train_X, axis=2) # Train_X = np.reshape(Train_X, Train_X.shape + (1, 1,) ) # Train_X", "SubperdB.append(VidperSub) ##### Setting up the LSTM model ######## data_dim=r*w # 2500 print(data_dim) timesteps=10", "for i in range(subjects): if sub == i: continue else: Train_X.append(SubperdB[i]) Train_Y.append(labelperSub[i]) #", "the CT order=np.unique(np.concatenate((predict,Test_Y))) #create an array to hold the CT for each CV", "from sklearn.metrics import confusion_matrix import scipy.io as sio from keras.models import Sequential from", "the selection.\") ######### Reading in the 
input images ######## SubperdB=[] for sub in" ]